Diffstat (limited to 'data/completion')
-rw-r--r--  data/completion/_gallery-dl      12
-rw-r--r--  data/completion/gallery-dl        2
-rw-r--r--  data/completion/gallery-dl.fish  12
3 files changed, 17 insertions, 9 deletions
diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 99fb8ad..f0d654e 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -13,6 +13,7 @@ _arguments -s -S \
{-X,--extractors}'[Load external extractors from PATH]':'<path>' \
--user-agent'[User-Agent request header]':'<ua>' \
--clear-cache'[Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)]':'<module>' \
+--compat'[Restore legacy '\''category'\'' names]' \
{-U,--update-check}'[Check if a newer version is available]' \
{-i,--input-file}'[Download URLs found in FILE ('\''-'\'' for stdin). More than one --input-file can be specified]':'<file>':_files \
{-I,--input-file-comment}'[Download URLs found in FILE. Comment them out after they were downloaded successfully.]':'<file>':_files \
@@ -29,8 +30,10 @@ _arguments -s -S \
{-E,--extractor-info}'[Print extractor defaults and settings]' \
{-K,--list-keywords}'[Print a list of available keywords and example values for the given URLs]' \
{-e,--error-file}'[Add input URLs which returned an error to FILE]':'<file>':_files \
-{-N,--print}'[Write FORMAT during EVENT (default '\''prepare'\'') to standard output. Examples: '\''id'\'' or '\''post:{md5\[:8\]}'\'']':'<[event:]format>' \
---print-to-file'[Append FORMAT during EVENT to FILE]':'<[event:]format file>' \
+{-N,--print}'[Write FORMAT during EVENT (default '\''prepare'\'') to standard output instead of downloading files. Can be used multiple times. Examples: '\''id'\'' or '\''post:{md5\[:8\]}'\'']':'<[event:]format>' \
+--Print'[Like --print, but downloads files as well]':'<[event:]format>' \
+--print-to-file'[Append FORMAT during EVENT to FILE instead of downloading files. Can be used multiple times]':'<[event:]format file>' \
+--Print-to-file'[Like --print-to-file, but downloads files as well]':'<[event:]format file>' \
--list-modules'[Print a list of available extractor modules]' \
--list-extractors'[Print a list of extractor classes with description, (sub)category and example URL]':'<[categories]>' \
--write-log'[Write logging output to FILE]':'<file>':_files \
@@ -45,10 +48,11 @@ _arguments -s -S \
{-4,--force-ipv4}'[Make all connections via IPv4]' \
{-6,--force-ipv6}'[Make all connections via IPv6]' \
--no-check-certificate'[Disable HTTPS certificate validation]' \
-{-r,--limit-rate}'[Maximum download rate (e.g. 500k or 2.5M)]':'<rate>' \
+{-r,--limit-rate}'[Maximum download rate (e.g. 500k, 2.5M, or 800k-2M)]':'<rate>' \
--chunk-size'[Size of in-memory data chunks (default: 32k)]':'<size>' \
--sleep'[Number of seconds to wait before each download. This can be either a constant value or a range (e.g. 2.7 or 2.0-3.5)]':'<seconds>' \
--sleep-request'[Number of seconds to wait between HTTP requests during data extraction]':'<seconds>' \
+--sleep-429'[Number of seconds to wait when receiving a '\''429 Too Many Requests'\'' response]':'<seconds>' \
--sleep-extractor'[Number of seconds to wait before starting data extraction for an input URL]':'<seconds>' \
--no-part'[Do not use .part files]' \
--no-skip'[Do not skip downloads; overwrite existing files]' \
@@ -72,7 +76,7 @@ _arguments -s -S \
{-T,--terminate}'[Stop current and parent extractor runs after N consecutive file downloads were skipped]':'<n>' \
--filesize-min'[Do not download files smaller than SIZE (e.g. 500k or 2.5M)]':'<size>' \
--filesize-max'[Do not download files larger than SIZE (e.g. 500k or 2.5M)]':'<size>' \
---download-archive'[Record all downloaded or skipped files in FILE and skip downloading any file already in it]':'<file>':_files \
+--download-archive'[Record successfully downloaded files in FILE and skip downloading any file already in it]':'<file>':_files \
--range'[Index range(s) specifying which files to download. These can be either a constant value, range, or slice (e.g. '\''5'\'', '\''8-20'\'', or '\''1:24:3'\'')]':'<range>' \
--chapter-range'[Like '\''--range'\'', but applies to manga chapters and other delegated URLs]':'<range>' \
--filter'[Python expression controlling which files to download. Files for which the expression evaluates to False are ignored. Available keys are the filename-specific ones listed by '\''-K'\''. Example: --filter "image_width >= 1000 and rating in ('\''s'\'', '\''q'\'')"]':'<expr>' \
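The new and reworded zsh entries above track new command-line behaviour. A hedged usage sketch, based only on the option descriptions in this hunk (URL is a placeholder; exact flag semantics are assumed from those descriptions):

    # limit the download rate to a 800k-2M range, back off 60 seconds on an
    # HTTP 429 response, and print each post's truncated md5 while still downloading
    gallery-dl -r 800k-2M --sleep-429 60 --Print 'post:{md5[:8]}' URL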
diff --git a/data/completion/gallery-dl b/data/completion/gallery-dl
index 161113c..ae4cb0f 100644
--- a/data/completion/gallery-dl
+++ b/data/completion/gallery-dl
@@ -10,7 +10,7 @@ _gallery_dl()
elif [[ "${prev}" =~ ^()$ ]]; then
COMPREPLY=( $(compgen -d -- "${cur}") )
else
- COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --resolve-json --simulate --extractor-info --list-keywords --error-file --print --print-to-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --print-traffic --no-colors --retries --http-timeout --proxy --source-address --force-ipv4 --force-ipv6 --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --rename --rename-to --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
+ COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --compat --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --resolve-json --simulate --extractor-info --list-keywords --error-file --print --Print --print-to-file --Print-to-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --print-traffic --no-colors --retries --http-timeout --proxy --source-address --force-ipv4 --force-ipv6 --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-429 --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --rename --rename-to --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
fi
}
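A minimal way to try the updated bash completion in the current shell, assuming the file registers _gallery_dl with complete further down (outside this hunk):

    source data/completion/gallery-dl
    # typing `gallery-dl --sle` and pressing Tab should now offer
    # --sleep, --sleep-request, --sleep-429 and --sleep-extractor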
diff --git a/data/completion/gallery-dl.fish b/data/completion/gallery-dl.fish
index f8bb723..8eb427a 100644
--- a/data/completion/gallery-dl.fish
+++ b/data/completion/gallery-dl.fish
@@ -7,6 +7,7 @@ complete -c gallery-dl -x -a '(__fish_complete_directories)' -s 'D' -l 'director
complete -c gallery-dl -x -a '(__fish_complete_directories)' -s 'X' -l 'extractors' -d 'Load external extractors from PATH'
complete -c gallery-dl -x -l 'user-agent' -d 'User-Agent request header'
complete -c gallery-dl -x -l 'clear-cache' -d 'Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)'
+complete -c gallery-dl -l 'compat' -d 'Restore legacy "category" names'
complete -c gallery-dl -s 'U' -l 'update-check' -d 'Check if a newer version is available'
complete -c gallery-dl -r -F -s 'i' -l 'input-file' -d 'Download URLs found in FILE ("-" for stdin). More than one --input-file can be specified'
complete -c gallery-dl -r -F -s 'I' -l 'input-file-comment' -d 'Download URLs found in FILE. Comment them out after they were downloaded successfully.'
@@ -23,8 +24,10 @@ complete -c gallery-dl -s 's' -l 'simulate' -d 'Simulate data extraction; do not
complete -c gallery-dl -s 'E' -l 'extractor-info' -d 'Print extractor defaults and settings'
complete -c gallery-dl -s 'K' -l 'list-keywords' -d 'Print a list of available keywords and example values for the given URLs'
complete -c gallery-dl -r -F -s 'e' -l 'error-file' -d 'Add input URLs which returned an error to FILE'
-complete -c gallery-dl -x -s 'N' -l 'print' -d 'Write FORMAT during EVENT (default "prepare") to standard output. Examples: "id" or "post:{md5[:8]}"'
-complete -c gallery-dl -x -l 'print-to-file' -d 'Append FORMAT during EVENT to FILE'
+complete -c gallery-dl -x -s 'N' -l 'print' -d 'Write FORMAT during EVENT (default "prepare") to standard output instead of downloading files. Can be used multiple times. Examples: "id" or "post:{md5[:8]}"'
+complete -c gallery-dl -x -l 'Print' -d 'Like --print, but downloads files as well'
+complete -c gallery-dl -x -l 'print-to-file' -d 'Append FORMAT during EVENT to FILE instead of downloading files. Can be used multiple times'
+complete -c gallery-dl -x -l 'Print-to-file' -d 'Like --print-to-file, but downloads files as well'
complete -c gallery-dl -l 'list-modules' -d 'Print a list of available extractor modules'
complete -c gallery-dl -x -l 'list-extractors' -d 'Print a list of extractor classes with description, (sub)category and example URL'
complete -c gallery-dl -r -F -l 'write-log' -d 'Write logging output to FILE'
@@ -39,10 +42,11 @@ complete -c gallery-dl -x -l 'source-address' -d 'Client-side IP address to bind
complete -c gallery-dl -s '4' -l 'force-ipv4' -d 'Make all connections via IPv4'
complete -c gallery-dl -s '6' -l 'force-ipv6' -d 'Make all connections via IPv6'
complete -c gallery-dl -l 'no-check-certificate' -d 'Disable HTTPS certificate validation'
-complete -c gallery-dl -x -s 'r' -l 'limit-rate' -d 'Maximum download rate (e.g. 500k or 2.5M)'
+complete -c gallery-dl -x -s 'r' -l 'limit-rate' -d 'Maximum download rate (e.g. 500k, 2.5M, or 800k-2M)'
complete -c gallery-dl -x -l 'chunk-size' -d 'Size of in-memory data chunks (default: 32k)'
complete -c gallery-dl -x -l 'sleep' -d 'Number of seconds to wait before each download. This can be either a constant value or a range (e.g. 2.7 or 2.0-3.5)'
complete -c gallery-dl -x -l 'sleep-request' -d 'Number of seconds to wait between HTTP requests during data extraction'
+complete -c gallery-dl -x -l 'sleep-429' -d 'Number of seconds to wait when receiving a "429 Too Many Requests" response'
complete -c gallery-dl -x -l 'sleep-extractor' -d 'Number of seconds to wait before starting data extraction for an input URL'
complete -c gallery-dl -l 'no-part' -d 'Do not use .part files'
complete -c gallery-dl -l 'no-skip' -d 'Do not skip downloads; overwrite existing files'
@@ -67,7 +71,7 @@ complete -c gallery-dl -x -s 'A' -l 'abort' -d 'Stop current extractor run after
complete -c gallery-dl -x -s 'T' -l 'terminate' -d 'Stop current and parent extractor runs after N consecutive file downloads were skipped'
complete -c gallery-dl -x -l 'filesize-min' -d 'Do not download files smaller than SIZE (e.g. 500k or 2.5M)'
complete -c gallery-dl -x -l 'filesize-max' -d 'Do not download files larger than SIZE (e.g. 500k or 2.5M)'
-complete -c gallery-dl -r -F -l 'download-archive' -d 'Record all downloaded or skipped files in FILE and skip downloading any file already in it'
+complete -c gallery-dl -r -F -l 'download-archive' -d 'Record successfully downloaded files in FILE and skip downloading any file already in it'
complete -c gallery-dl -x -l 'range' -d 'Index range(s) specifying which files to download. These can be either a constant value, range, or slice (e.g. "5", "8-20", or "1:24:3")'
complete -c gallery-dl -x -l 'chapter-range' -d 'Like "--range", but applies to manga chapters and other delegated URLs'
complete -c gallery-dl -x -l 'filter' -d 'Python expression controlling which files to download. Files for which the expression evaluates to False are ignored. Available keys are the filename-specific ones listed by "-K". Example: --filter "image_width >= 1000 and rating in ("s", "q")"'
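fish autoloads per-command completions from ~/.config/fish/completions, so one way to pick up the new --Print and --sleep-429 entries locally (destination assumed to be the user's default fish config directory) is:

    cp data/completion/gallery-dl.fish ~/.config/fish/completions/
    # in a new fish session, `gallery-dl --sle` followed by Tab should list the sleep options above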