From a6e995c093de8aae2e91a0787281bb34c0b871eb Mon Sep 17 00:00:00 2001
From: Unit 193
Date: Thu, 31 Jul 2025 01:22:01 -0400
Subject: New upstream version 1.30.2.

---
 data/completion/_gallery-dl | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

(limited to 'data/completion/_gallery-dl')

diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 99fb8ad..f0d654e 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -13,6 +13,7 @@ _arguments -s -S \
 {-X,--extractors}'[Load external extractors from PATH]':'' \
 --user-agent'[User-Agent request header]':'' \
 --clear-cache'[Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)]':'' \
+--compat'[Restore legacy '\''category'\'' names]' \
 {-U,--update-check}'[Check if a newer version is available]' \
 {-i,--input-file}'[Download URLs found in FILE ('\''-'\'' for stdin). More than one --input-file can be specified]':'':_files \
 {-I,--input-file-comment}'[Download URLs found in FILE. Comment them out after they were downloaded successfully.]':'':_files \
@@ -29,8 +30,10 @@ _arguments -s -S \
 {-E,--extractor-info}'[Print extractor defaults and settings]' \
 {-K,--list-keywords}'[Print a list of available keywords and example values for the given URLs]' \
 {-e,--error-file}'[Add input URLs which returned an error to FILE]':'':_files \
-{-N,--print}'[Write FORMAT during EVENT (default '\''prepare'\'') to standard output. Examples: '\''id'\'' or '\''post:{md5\[:8\]}'\'']':'<[event:]format>' \
---print-to-file'[Append FORMAT during EVENT to FILE]':'<[event:]format file>' \
+{-N,--print}'[Write FORMAT during EVENT (default '\''prepare'\'') to standard output instead of downloading files. Can be used multiple times. Examples: '\''id'\'' or '\''post:{md5\[:8\]}'\'']':'<[event:]format>' \
+--Print'[Like --print, but downloads files as well]':'<[event:]format>' \
+--print-to-file'[Append FORMAT during EVENT to FILE instead of downloading files. Can be used multiple times]':'<[event:]format file>' \
+--Print-to-file'[Like --print-to-file, but downloads files as well]':'<[event:]format file>' \
 --list-modules'[Print a list of available extractor modules]' \
 --list-extractors'[Print a list of extractor classes with description, (sub)category and example URL]':'<[categories]>' \
 --write-log'[Write logging output to FILE]':'':_files \
@@ -45,10 +48,11 @@ _arguments -s -S \
 {-4,--force-ipv4}'[Make all connections via IPv4]' \
 {-6,--force-ipv6}'[Make all connections via IPv6]' \
 --no-check-certificate'[Disable HTTPS certificate validation]' \
-{-r,--limit-rate}'[Maximum download rate (e.g. 500k or 2.5M)]':'' \
+{-r,--limit-rate}'[Maximum download rate (e.g. 500k, 2.5M, or 800k-2M)]':'' \
 --chunk-size'[Size of in-memory data chunks (default: 32k)]':'' \
 --sleep'[Number of seconds to wait before each download. This can be either a constant value or a range (e.g. 2.7 or 2.0-3.5)]':'' \
 --sleep-request'[Number of seconds to wait between HTTP requests during data extraction]':'' \
+--sleep-429'[Number of seconds to wait when receiving a '\''429 Too Many Requests'\'' response]':'' \
 --sleep-extractor'[Number of seconds to wait before starting data extraction for an input URL]':'' \
 --no-part'[Do not use .part files]' \
 --no-skip'[Do not skip downloads; overwrite existing files]' \
@@ -72,7 +76,7 @@ _arguments -s -S \
 {-T,--terminate}'[Stop current and parent extractor runs after N consecutive file downloads were skipped]':'' \
 --filesize-min'[Do not download files smaller than SIZE (e.g. 500k or 2.5M)]':'' \
 --filesize-max'[Do not download files larger than SIZE (e.g. 500k or 2.5M)]':'' \
---download-archive'[Record all downloaded or skipped files in FILE and skip downloading any file already in it]':'':_files \
+--download-archive'[Record successfully downloaded files in FILE and skip downloading any file already in it]':'':_files \
 --range'[Index range(s) specifying which files to download. These can be either a constant value, range, or slice (e.g. '\''5'\'', '\''8-20'\'', or '\''1:24:3'\'')]':'' \
 --chapter-range'[Like '\''--range'\'', but applies to manga chapters and other delegated URLs]':'' \
 --filter'[Python expression controlling which files to download. Files for which the expression evaluates to False are ignored. Available keys are the filename-specific ones listed by '\''-K'\''. Example: --filter "image_width >= 1000 and rating in ('\''s'\'', '\''q'\'')"]':'' \
-- 
cgit v1.2.3
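
As a quick reference for the options this completion update covers, a minimal command-line sketch (the URL and the 60-second wait are illustrative placeholders, not values taken from this patch):

    gallery-dl --sleep-429 60 --limit-rate 800k-2M --Print 'post:{md5[:8]}' https://example.org/gallery

Per the descriptions above, --sleep-429 waits the given number of seconds after a '429 Too Many Requests' response, --limit-rate now also accepts a rate range such as 800k-2M, and --Print writes the formatted value while still downloading files.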