author    Unit 193 <unit193@unit193.net>  2021-06-22 22:30:36 -0400
committer Unit 193 <unit193@unit193.net>  2021-06-22 22:30:36 -0400
commit    32de2b06db501c7de81678bce8e3e0c3e63d340c (patch)
tree      fd58a26618a73de0faaf3e9c435a806aed7eced3 /data/completion
parent    8a644b7a06c504263a478d3681eed10b4161b5be (diff)

New upstream version 1.18.0. (upstream/1.18.0)
Diffstat (limited to 'data/completion')
-rw-r--r--  data/completion/_gallery-dl | 10
-rw-r--r--  data/completion/gallery-dl  |  2
2 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 436260b..15806e8 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -11,7 +11,7 @@ _arguments -C -S \
{-i,--input-file}'[Download URLs found in FILE ("-" for stdin). More than one --input-file can be specified]':'<file>':_files \
--cookies'[File to load additional cookies from]':'<file>':_files \
--proxy'[Use the specified proxy]':'<url>' \
---clear-cache'[Delete all cached login sessions, cookies, etc.]':'<module>' \
+--clear-cache'[Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)]':'<module>' \
{-q,--quiet}'[Activate quiet mode]' \
{-v,--verbose}'[Print various debugging information]' \
{-g,--get-urls}'[Print URLs instead of downloading]' \
@@ -27,7 +27,6 @@ _arguments -C -S \
--write-pages'[Write downloaded intermediary pages to files in the current directory to debug problems]' \
{-r,--limit-rate}'[Maximum download rate (e.g. 500k or 2.5M)]':'<rate>' \
{-R,--retries}'[Maximum number of retries for failed HTTP requests or -1 for infinite retries (default: 4)]':'<n>' \
-{-A,--abort}'[Abort extractor run after N consecutive file downloads have been skipped, e.g. if files with the same filename already exist]':'<n>' \
--http-timeout'[Timeout for HTTP connections (default: 30.0)]':'<seconds>' \
--sleep'[Number of seconds to sleep before each download]':'<seconds>' \
--filesize-min'[Do not download files smaller than SIZE (e.g. 500k or 2.5M)]':'<size>' \
@@ -44,7 +43,9 @@ _arguments -C -S \
{-u,--username}'[Username to login with]':'<user>' \
{-p,--password}'[Password belonging to the given username]':'<pass>' \
--netrc'[Enable .netrc authentication data]' \
---download-archive'[Record all downloaded files in the archive file and skip downloading any file already in it.]':'<file>':_files \
+--download-archive'[Record all downloaded files in the archive file and skip downloading any file already in it]':'<file>':_files \
+{-A,--abort}'[Stop current extractor run after N consecutive file downloads were skipped]':'<n>' \
+{-T,--terminate}'[Stop current and parent extractor runs after N consecutive file downloads were skipped]':'<n>' \
--range'[Index-range(s) specifying which images to download. For example "5-10" or "1,3-5,10-"]':'<range>' \
--chapter-range'[Like "--range", but applies to manga-chapters and other delegated URLs]':'<range>' \
--filter'[Python expression controlling which images to download. Files for which the expression evaluates to False are ignored. Available keys are the filename-specific ones listed by "-K". Example: --filter "image_width >= 1000 and rating in ("s", "q")"]':'<expr>' \
@@ -56,6 +57,7 @@ _arguments -C -S \
--write-tags'[Write image tags to separate text files]' \
--mtime-from-date'[Set file modification times according to "date" metadata]' \
--exec'[Execute CMD for each downloaded file. Example: --exec "convert {} {}.png && rm {}"]':'<cmd>' \
---exec-after'[Execute CMD after all files were downloaded successfully. Example: --exec-after "cd {} && convert * ../doc.pdf"]':'<cmd>' && rc=0
+--exec-after'[Execute CMD after all files were downloaded successfully. Example: --exec-after "cd {} && convert * ../doc.pdf"]':'<cmd>' \
+{-P,--postprocessor}'[Activate the specified post processor]':'<name>' && rc=0
return rc
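
For context, the zsh script above builds its completions from _arguments specs of the form {-s,--long}'[description]':'<placeholder>':action, which is exactly the syntax these hunks add and remove. Below is a minimal standalone sketch of that pattern, using a hypothetical "demo-tool" command and an invented option subset that is not part of this commit:

    #compdef demo-tool
    # Sketch only: {-s,--long} groups the short and long form of one option,
    # '[...]' is the description shown in the completion menu, and a trailing
    # ':message' or ':message:action' declares a required argument value
    # (_files completes filenames; a bare '<n>' is just a hint to the user).
    _arguments -C -S \
      {-A,--abort}'[Stop after N consecutive skipped downloads]':'<n>' \
      {-i,--input-file}'[Read URLs from FILE]':'<file>':_files \
      --netrc'[Enable .netrc authentication data]' && return 0
    return 1
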
diff --git a/data/completion/gallery-dl b/data/completion/gallery-dl
index 9a3a63e..f3d1100 100644
--- a/data/completion/gallery-dl
+++ b/data/completion/gallery-dl
@@ -10,7 +10,7 @@ _gallery_dl()
elif [[ "${prev}" =~ ^(-d|--dest)$ ]]; then
COMPREPLY=( $(compgen -d -- "${cur}") )
else
- COMPREPLY=( $(compgen -W "--help --version --dest --input-file --cookies --proxy --clear-cache --quiet --verbose --get-urls --resolve-urls --dump-json --simulate --extractor-info --list-keywords --list-modules --list-extractors --write-log --write-unsupported --write-pages --limit-rate --retries --abort --http-timeout --sleep --filesize-min --filesize-max --no-part --no-skip --no-mtime --no-download --no-check-certificate --config --config-yaml --option --ignore-config --username --password --netrc --download-archive --range --chapter-range --filter --chapter-filter --zip --ugoira-conv --ugoira-conv-lossless --write-metadata --write-tags --mtime-from-date --exec --exec-after" -- "${cur}") )
+ COMPREPLY=( $(compgen -W "--help --version --dest --input-file --cookies --proxy --clear-cache --quiet --verbose --get-urls --resolve-urls --dump-json --simulate --extractor-info --list-keywords --list-modules --list-extractors --write-log --write-unsupported --write-pages --limit-rate --retries --http-timeout --sleep --filesize-min --filesize-max --no-part --no-skip --no-mtime --no-download --no-check-certificate --config --config-yaml --option --ignore-config --username --password --netrc --download-archive --abort --terminate --range --chapter-range --filter --chapter-filter --zip --ugoira-conv --ugoira-conv-lossless --write-metadata --write-tags --mtime-from-date --exec --exec-after --postprocessor" -- "${cur}") )
fi
}
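
For comparison, the bash script uses the classic compgen/COMPREPLY pattern: look at the previous word, complete file or directory names for options that take a path, and otherwise filter a flat list of option names against the word being typed. A minimal sketch under the same assumptions (hypothetical "demo-tool" command, invented option subset, not part of this commit):

    # Sketch only: COMP_WORDS holds the words on the current command line,
    # compgen -W filters a word list against ${cur}, and compgen -f completes
    # filenames for options that expect a path.
    _demo_tool()
    {
        local cur="${COMP_WORDS[COMP_CWORD]}"
        local prev="${COMP_WORDS[COMP_CWORD-1]}"

        if [[ "${prev}" =~ ^(-i|--input-file)$ ]]; then
            COMPREPLY=( $(compgen -f -- "${cur}") )
        else
            COMPREPLY=( $(compgen -W "--help --version --abort --terminate --postprocessor" -- "${cur}") )
        fi
    }

    complete -F _demo_tool demo-tool
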