Diffstat (limited to 'data')
-rw-r--r--   data/completion/_gallery-dl       2
-rw-r--r--   data/completion/gallery-dl        2
-rw-r--r--   data/completion/gallery-dl.fish   2
-rw-r--r--   data/man/gallery-dl.1             8
-rw-r--r--   data/man/gallery-dl.conf.5      198
5 files changed, 196 insertions, 16 deletions
diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 14b7321..3308e98 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -26,6 +26,7 @@ _arguments -s -S \
 {-g,--get-urls}'[Print URLs instead of downloading]' \
 {-G,--resolve-urls}'[Print URLs instead of downloading; resolve intermediary URLs]' \
 {-j,--dump-json}'[Print JSON information]' \
+{-J,--resolve-json}'[Print JSON information; resolve intermediary URLs]' \
 {-s,--simulate}'[Simulate data extraction; do not download anything]' \
 {-E,--extractor-info}'[Print extractor defaults and settings]' \
 {-K,--list-keywords}'[Print a list of available keywords and example values for the given URLs]' \
@@ -35,6 +36,7 @@ _arguments -s -S \
 --write-log'[Write logging output to FILE]':'<file>':_files \
 --write-unsupported'[Write URLs, which get emitted by other extractors but cannot be handled, to FILE]':'<file>':_files \
 --write-pages'[Write downloaded intermediary pages to files in the current directory to debug problems]' \
+--print-traffic'[Display sent and read HTTP traffic]' \
 --no-colors'[Do not emit ANSI color codes in output]' \
 {-R,--retries}'[Maximum number of retries for failed HTTP requests or -1 for infinite retries (default: 4)]':'<n>' \
 --http-timeout'[Timeout for HTTP connections (default: 30.0)]':'<seconds>' \
diff --git a/data/completion/gallery-dl b/data/completion/gallery-dl
index 625ecd6..0d933fa 100644
--- a/data/completion/gallery-dl
+++ b/data/completion/gallery-dl
@@ -10,7 +10,7 @@ _gallery_dl()
     elif [[ "${prev}" =~ ^()$ ]]; then
         COMPREPLY=( $(compgen -d -- "${cur}") )
     else
-        COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --update --update-to --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --simulate --extractor-info --list-keywords --error-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --no-colors --retries --http-timeout --proxy --source-address --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
+        COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --update --update-to --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --resolve-json --simulate --extractor-info --list-keywords --error-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --print-traffic --no-colors --retries --http-timeout --proxy --source-address --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
     fi
 }
diff --git a/data/completion/gallery-dl.fish b/data/completion/gallery-dl.fish
index a67cd63..7243998 100644
--- a/data/completion/gallery-dl.fish
+++ b/data/completion/gallery-dl.fish
@@ -20,6 +20,7 @@ complete -c gallery-dl -s 'v' -l 'verbose' -d 'Print various debugging informati
 complete -c gallery-dl -s 'g' -l 'get-urls' -d 'Print URLs instead of downloading'
 complete -c gallery-dl -s 'G' -l 'resolve-urls' -d 'Print URLs instead of downloading; resolve intermediary URLs'
 complete -c gallery-dl -s 'j' -l 'dump-json' -d 'Print JSON information'
+complete -c gallery-dl -s 'J' -l 'resolve-json' -d 'Print JSON information; resolve intermediary URLs'
 complete -c gallery-dl -s 's' -l 'simulate' -d 'Simulate data extraction; do not download anything'
 complete -c gallery-dl -s 'E' -l 'extractor-info' -d 'Print extractor defaults and settings'
 complete -c gallery-dl -s 'K' -l 'list-keywords' -d 'Print a list of available keywords and example values for the given URLs'
@@ -29,6 +30,7 @@ complete -c gallery-dl -l 'list-extractors' -d 'Print a list of extractor classe
 complete -c gallery-dl -r -F -l 'write-log' -d 'Write logging output to FILE'
 complete -c gallery-dl -r -F -l 'write-unsupported' -d 'Write URLs, which get emitted by other extractors but cannot be handled, to FILE'
 complete -c gallery-dl -l 'write-pages' -d 'Write downloaded intermediary pages to files in the current directory to debug problems'
+complete -c gallery-dl -l 'print-traffic' -d 'Display sent and read HTTP traffic'
 complete -c gallery-dl -l 'no-colors' -d 'Do not emit ANSI color codes in output'
 complete -c gallery-dl -x -s 'R' -l 'retries' -d 'Maximum number of retries for failed HTTP requests or -1 for infinite retries (default: 4)'
 complete -c gallery-dl -x -l 'http-timeout' -d 'Timeout for HTTP connections (default: 30.0)'
diff --git a/data/man/gallery-dl.1 b/data/man/gallery-dl.1
index 37529bf..d1eddd6 100644
--- a/data/man/gallery-dl.1
+++ b/data/man/gallery-dl.1
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL" "1" "2024-06-22" "1.27.1" "gallery-dl Manual"
+.TH "GALLERY-DL" "1" "2024-08-03" "1.27.2" "gallery-dl Manual"
 .\" disable hyphenation
 .nh

@@ -80,6 +80,9 @@ Print URLs instead of downloading; resolve intermediary URLs
 .B "\-j, \-\-dump\-json"
 Print JSON information
 .TP
+.B "\-J, \-\-resolve\-json"
+Print JSON information; resolve intermediary URLs
+.TP
 .B "\-s, \-\-simulate"
 Simulate data extraction; do not download anything
 .TP
@@ -107,6 +110,9 @@ Write URLs, which get emitted by other extractors but cannot be handled, to FILE
 .B "\-\-write\-pages"
 Write downloaded intermediary pages to files in the current directory to debug problems
 .TP
+.B "\-\-print\-traffic"
+Display sent and read HTTP traffic
+.TP
 .B "\-\-no\-colors"
 Do not emit ANSI color codes in output
 .TP
diff --git a/data/man/gallery-dl.conf.5 b/data/man/gallery-dl.conf.5
index e3ed58a..8f75284 100644
--- a/data/man/gallery-dl.conf.5
+++ b/data/man/gallery-dl.conf.5
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL.CONF" "5" "2024-06-22" "1.27.1" "gallery-dl Manual"
Manual" +.TH "GALLERY-DL.CONF" "5" "2024-08-03" "1.27.2" "gallery-dl Manual" .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) @@ -456,6 +456,7 @@ response before \f[I]retrying\f[] the request. .br * \f[I]"0.5-1.5"\f[] \f[I][Danbooru]\f[], \f[I][E621]\f[], \f[I][foolfuuka]:search\f[], \f[I]itaku\f[], +\f[I]koharu\f[], \f[I]newgrounds\f[], \f[I][philomena]\f[], \f[I]pixiv:novel\f[], \f[I]plurk\f[], \f[I]poipiku\f[] , \f[I]pornpics\f[], \f[I]soundgasm\f[], \f[I]urlgalleries\f[], \f[I]vk\f[], \f[I]zerochan\f[] @@ -536,6 +537,8 @@ and optional for .br * \f[I]kemonoparty\f[] .br +* \f[I]koharu\f[] +.br * \f[I]mangadex\f[] .br * \f[I]mangoxo\f[] @@ -728,7 +731,7 @@ or a \f[I]list\f[] with IP and explicit port number as elements. \f[I]string\f[] .IP "Default:" 9 -\f[I]"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/115.0"\f[] +\f[I]"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:128.0) Gecko/20100101 Firefox/128.0"\f[] .IP "Description:" 4 User-Agent header value to be used for HTTP requests. @@ -1017,6 +1020,29 @@ but be aware that using external inputs for building local paths may pose a security risk. +.SS extractor.*.archive-event +.IP "Type:" 6 ++ \f[I]string\f[] ++ \f[I]list\f[] of \f[I]strings\f[] + +.IP "Default:" 9 +\f[I]"file"\f[] + +.IP "Example:" 4 +.br +* "file,skip" +.br +* ["file", "skip"] + +.IP "Description:" 4 +\f[I]Event(s)\f[] +for which IDs get written to an +\f[I]archive\f[]. + +Available events are: +\f[I]file\f[], \f[I]skip\f[] + + .SS extractor.*.archive-format .IP "Type:" 6 \f[I]string\f[] @@ -1075,25 +1101,33 @@ for available \f[I]PRAGMA\f[] statements and further details. .SS extractor.*.actions .IP "Type:" 6 .br -* \f[I]object\f[] (pattern -> action) +* \f[I]object\f[] (pattern -> action(s)) .br -* \f[I]list\f[] of \f[I]lists\f[] with 2 \f[I]strings\f[] as elements +* \f[I]list\f[] of \f[I]lists\f[] with pattern -> action(s) pairs as elements .IP "Example:" 4 .. code:: json { -"error" : "status \f[I]= 1", +"info:Logging in as .+" : "level = debug", "warning:(?i)unable to .+": "exit 127", -"info:Logging in as .+" : "level = debug" +"error" : [ +"status \f[I]= 1", +"exec notify.sh 'gdl error'", +"abort" +] } .. code:: json [ -["error" , "status \f[]= 1" ], +["info:Logging in as .+" , "level = debug"], ["warning:(?i)unable to .+", "exit 127" ], -["info:Logging in as .+" , "level = debug"] +["error" , [ +"status \f[]= 1", +"exec notify.sh 'gdl error'", +"abort" +]] ] @@ -1110,6 +1144,9 @@ matches logging messages of all levels \f[I]action\f[] is parsed as action type followed by (optional) arguments. +It is possible to specify more than one \f[I]action\f[] per \f[I]pattern\f[] +by providing them as a \f[I]list\f[]: \f[I]["<action1>", "<action2>", …]\f[] + Supported Action Types: \f[I]status\f[]: @@ -1128,12 +1165,21 @@ Modify severity level of the current logging message. .br Can be one of \f[I]debug\f[], \f[I]info\f[], \f[I]warning\f[], \f[I]error\f[] or an integer value. .br -\f[I]print\f[] +\f[I]print\f[]: Write argument to stdout. +\f[I]exec\f[]: +Run a shell command. +\f[I]abort\f[]: +Stop the current extractor run. +\f[I]terminate\f[]: +Stop the current extractor run, including parent extractors. \f[I]restart\f[]: Restart the current extractor run. \f[I]wait\f[]: -Stop execution until Enter is pressed. +Sleep for a given \f[I]Duration\f[] or +.br +wait until Enter is pressed when no argument was given. +.br \f[I]exit\f[]: Exit the program with the given argument as exit status. 
@@ -1642,6 +1688,23 @@ Sets the maximum depth of returned reply posts.
 Process reposts.


+.SS extractor.cien.files
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Default:" 9
+\f[I]["image", "video", "download", "gallery"]\f[]
+
+.IP "Description:" 4
+Determines the type and order of files to be downloaded.
+
+Available types are
+\f[I]image\f[],
+\f[I]video\f[],
+\f[I]download\f[],
+\f[I]gallery\f[].
+
+
 .SS extractor.cyberdrop.domain
 .IP "Type:" 6
 \f[I]string\f[]
@@ -3004,6 +3067,36 @@ If the selected format is not available,
 the first in the list gets chosen (usually mp3).


+.SS extractor.koharu.cbz
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Download each gallery as a single \f[I].cbz\f[] file.
+
+Disabling this option causes a gallery
+to be downloaded as individual image files.
+
+
+.SS extractor.koharu.format
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]"original"\f[]
+
+.IP "Description:" 4
+Name of the image format to download.
+
+Available formats are
+.br
+\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[]/\f[I]"original"\f[]
+.br
+
+
 .SS extractor.lolisafe.domain
 .IP "Type:" 6
 \f[I]string\f[]
@@ -4310,6 +4403,27 @@ or each inline image,
 use an extra HTTP request to find the URL
 to its full-resolution version.

+.SS extractor.tumblr.pagination
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]"offset"\f[]
+
+.IP "Description:" 4
+Controls how to paginate over blog posts.
+
+.br
+* \f[I]"api"\f[]: \f[I]next\f[] parameter provided by the API
+(potentially misses posts due to a
+\f[I]bug\f[]
+in Tumblr's API)
+.br
+* \f[I]"before"\f[]: timestamp of last post
+.br
+* \f[I]"offset"\f[]: post offset number
+
+
 .SS extractor.tumblr.ratelimit
 .IP "Type:" 6
 \f[I]string\f[]
@@ -4919,6 +5033,35 @@ Note: Requires \f[I]login\f[] or \f[I]cookies\f[]


+.SS extractor.vsco.include
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Default:" 9
+\f[I]"gallery"\f[]
+
+.IP "Example:" 4
+.br
+* "avatar,collection"
+.br
+* ["avatar", "collection"]
+
+.IP "Description:" 4
+A (comma-separated) list of subcategories to include
+when processing a user profile.
+
+Possible values are
+\f[I]"avatar"\f[],
+\f[I]"gallery"\f[],
+\f[I]"spaces"\f[],
+\f[I]"collection"\f[],
+
+It is possible to use \f[I]"all"\f[] instead of listing all values separately.
+
+
 .SS extractor.vsco.videos
 .IP "Type:" 6
 \f[I]bool\f[]
@@ -5282,17 +5425,25 @@ Note: This requires 1 additional HTTP request per post.
 .SS extractor.[booru].url
 .IP "Type:" 6
-\f[I]string\f[]
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]

 .IP "Default:" 9
 \f[I]"file_url"\f[]

 .IP "Example:" 4
-"preview_url"
+.br
+* "preview_url"
+.br
+* ["sample_url", "preview_url", "file_url"]

 .IP "Description:" 4
 Alternate field name to retrieve download URLs from.

+When multiple names are given, download the first available one.
+

 .SS extractor.[manga-extractor].chapter-reverse
 .IP "Type:" 6
 \f[I]bool\f[]
@@ -6249,13 +6400,19 @@ If this option is set, \f[I]metadata.extension\f[] and

 .SS metadata.directory
 .IP "Type:" 6
-\f[I]string\f[]
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]

 .IP "Default:" 9
 \f[I]"."\f[]

 .IP "Example:" 4
-"metadata"
+.br
+* "metadata"
+.br
+* ["..", "metadata", "\\fF {id // 500 * 500}"]

 .IP "Description:" 4
 Directory where metadata files are stored in relative to the
@@ -6965,6 +7122,19 @@ Set this option to \f[I]null\f[] or an invalid path to disable this cache.

+.SS filters-environment
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Evaluate filter expressions raising an exception as \f[I]false\f[]
+instead of aborting the current extractor run
+by wrapping them in a try/except block.
+
+
 .SS format-separator
 .IP "Type:" 6
 \f[I]string\f[]
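
The gallery-dl.conf.5 hunks above add one new top-level option (filters-environment), a new extractor.*.archive-event option, and support for lists of actions in extractor.*.actions. A minimal gallery-dl.conf sketch combining them might look as follows; the option names, events, and action types are the ones documented in the patch, while the archive path is an illustrative placeholder and notify.sh is simply the helper script used in the patch's own example.

{
    "filters-environment": true,

    "extractor": {
        "archive": "/path/to/archive.sqlite3",
        "archive-event": ["file", "skip"],

        "actions": {
            "info:Logging in as .+"   : "level = debug",
            "warning:(?i)unable to .+": "exit 127",
            "error": [
                "exec notify.sh 'gdl error'",
                "abort"
            ]
        }
    }
}

Here every error-level log message runs a shell command and then stops the current extractor run, and skipped files are recorded in the archive alongside downloaded ones. filters-environment is shown at its documented default of true, where a filter expression that raises an exception evaluates to false instead of aborting the run.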
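
The remaining additions are per-extractor options (cien.files, koharu.cbz, koharu.format, tumblr.pagination, vsco.include) together with list support for extractor.[booru].url and metadata.directory. The sketch below plugs the documented defaults and examples into the usual gallery-dl.conf layout; danbooru merely stands in for one of the [booru] extractors, and the postprocessor entry is only there to show metadata.directory as a list.

{
    "extractor": {
        "cien": {
            "files": ["image", "video", "download", "gallery"]
        },
        "koharu": {
            "cbz": true,
            "format": "original"
        },
        "tumblr": {
            "pagination": "offset"
        },
        "vsco": {
            "include": ["avatar", "gallery", "collection"]
        },
        "danbooru": {
            "url": ["sample_url", "preview_url", "file_url"]
        },
        "postprocessors": [
            {
                "name": "metadata",
                "directory": ["..", "metadata"]
            }
        ]
    }
}

With the list form of url, the first field that is actually present on a post is used as the download URL; the list form of directory composes the metadata target directory from its elements, as in the ["..", "metadata", ...] example quoted above.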
