author     Unit 193 <unit193@unit193.net>   2021-06-22 22:30:36 -0400
committer  Unit 193 <unit193@unit193.net>   2021-06-22 22:30:36 -0400
commit     32de2b06db501c7de81678bce8e3e0c3e63d340c
tree       fd58a26618a73de0faaf3e9c435a806aed7eced3
parent     8a644b7a06c504263a478d3681eed10b4161b5be

New upstream version 1.18.0 (upstream/1.18.0)
Diffstat (limited to 'data')
-rw-r--r--  data/completion/_gallery-dl | 10
-rw-r--r--  data/completion/gallery-dl  |  2
-rw-r--r--  data/man/gallery-dl.1       | 18
-rw-r--r--  data/man/gallery-dl.conf.5  | 43
4 files changed, 53 insertions(+), 20 deletions(-)
diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 436260b..15806e8 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -11,7 +11,7 @@ _arguments -C -S \
{-i,--input-file}'[Download URLs found in FILE ("-" for stdin). More than one --input-file can be specified]':'<file>':_files \
--cookies'[File to load additional cookies from]':'<file>':_files \
--proxy'[Use the specified proxy]':'<url>' \
---clear-cache'[Delete all cached login sessions, cookies, etc.]':'<module>' \
+--clear-cache'[Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)]':'<module>' \
{-q,--quiet}'[Activate quiet mode]' \
{-v,--verbose}'[Print various debugging information]' \
{-g,--get-urls}'[Print URLs instead of downloading]' \
@@ -27,7 +27,6 @@ _arguments -C -S \
--write-pages'[Write downloaded intermediary pages to files in the current directory to debug problems]' \
{-r,--limit-rate}'[Maximum download rate (e.g. 500k or 2.5M)]':'<rate>' \
{-R,--retries}'[Maximum number of retries for failed HTTP requests or -1 for infinite retries (default: 4)]':'<n>' \
-{-A,--abort}'[Abort extractor run after N consecutive file downloads have been skipped, e.g. if files with the same filename already exist]':'<n>' \
--http-timeout'[Timeout for HTTP connections (default: 30.0)]':'<seconds>' \
--sleep'[Number of seconds to sleep before each download]':'<seconds>' \
--filesize-min'[Do not download files smaller than SIZE (e.g. 500k or 2.5M)]':'<size>' \
@@ -44,7 +43,9 @@ _arguments -C -S \
{-u,--username}'[Username to login with]':'<user>' \
{-p,--password}'[Password belonging to the given username]':'<pass>' \
--netrc'[Enable .netrc authentication data]' \
---download-archive'[Record all downloaded files in the archive file and skip downloading any file already in it.]':'<file>':_files \
+--download-archive'[Record all downloaded files in the archive file and skip downloading any file already in it]':'<file>':_files \
+{-A,--abort}'[Stop current extractor run after N consecutive file downloads were skipped]':'<n>' \
+{-T,--terminate}'[Stop current and parent extractor runs after N consecutive file downloads were skipped]':'<n>' \
--range'[Index-range(s) specifying which images to download. For example "5-10" or "1,3-5,10-"]':'<range>' \
--chapter-range'[Like "--range", but applies to manga-chapters and other delegated URLs]':'<range>' \
--filter'[Python expression controlling which images to download. Files for which the expression evaluates to False are ignored. Available keys are the filename-specific ones listed by "-K". Example: --filter "image_width >= 1000 and rating in ("s", "q")"]':'<expr>' \
@@ -56,6 +57,7 @@ _arguments -C -S \
--write-tags'[Write image tags to separate text files]' \
--mtime-from-date'[Set file modification times according to "date" metadata]' \
--exec'[Execute CMD for each downloaded file. Example: --exec "convert {} {}.png && rm {}"]':'<cmd>' \
---exec-after'[Execute CMD after all files were downloaded successfully. Example: --exec-after "cd {} && convert * ../doc.pdf"]':'<cmd>' && rc=0
+--exec-after'[Execute CMD after all files were downloaded successfully. Example: --exec-after "cd {} && convert * ../doc.pdf"]':'<cmd>' \
+{-P,--postprocessor}'[Activate the specified post processor]':'<name>' && rc=0
return rc
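The three completion entries added above correspond to command-line options new in 1.18.0. A quick sketch of how they are invoked (URLs are placeholders; assumes gallery-dl 1.18.0 is on PATH):

    # -A/--abort: stop the current extractor run after 3 consecutive skipped files
    gallery-dl -A 3 "https://example.org/gallery/12345"

    # -T/--terminate: additionally stop the parent run (e.g. a user page queueing galleries)
    gallery-dl -T 3 "https://example.org/user/example"

    # -P/--postprocessor: activate the "zip" post processor for this run only
    gallery-dl -P zip "https://example.org/gallery/12345"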
diff --git a/data/completion/gallery-dl b/data/completion/gallery-dl
index 9a3a63e..f3d1100 100644
--- a/data/completion/gallery-dl
+++ b/data/completion/gallery-dl
@@ -10,7 +10,7 @@ _gallery_dl()
elif [[ "${prev}" =~ ^(-d|--dest)$ ]]; then
COMPREPLY=( $(compgen -d -- "${cur}") )
else
- COMPREPLY=( $(compgen -W "--help --version --dest --input-file --cookies --proxy --clear-cache --quiet --verbose --get-urls --resolve-urls --dump-json --simulate --extractor-info --list-keywords --list-modules --list-extractors --write-log --write-unsupported --write-pages --limit-rate --retries --abort --http-timeout --sleep --filesize-min --filesize-max --no-part --no-skip --no-mtime --no-download --no-check-certificate --config --config-yaml --option --ignore-config --username --password --netrc --download-archive --range --chapter-range --filter --chapter-filter --zip --ugoira-conv --ugoira-conv-lossless --write-metadata --write-tags --mtime-from-date --exec --exec-after" -- "${cur}") )
+ COMPREPLY=( $(compgen -W "--help --version --dest --input-file --cookies --proxy --clear-cache --quiet --verbose --get-urls --resolve-urls --dump-json --simulate --extractor-info --list-keywords --list-modules --list-extractors --write-log --write-unsupported --write-pages --limit-rate --retries --http-timeout --sleep --filesize-min --filesize-max --no-part --no-skip --no-mtime --no-download --no-check-certificate --config --config-yaml --option --ignore-config --username --password --netrc --download-archive --abort --terminate --range --chapter-range --filter --chapter-filter --zip --ugoira-conv --ugoira-conv-lossless --write-metadata --write-tags --mtime-from-date --exec --exec-after --postprocessor" -- "${cur}") )
fi
}
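The function above only swaps the word list passed to bash's compgen builtin; the filtering itself is unchanged. To see which of the newly added words would be offered for a given prefix, compgen can be run by hand (plain bash, no gallery-dl required):

    # prints "--terminate", the only word in this list starting with "--te"
    compgen -W "--abort --terminate --postprocessor --download-archive" -- "--te"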
diff --git a/data/man/gallery-dl.1 b/data/man/gallery-dl.1
index 719b8b4..25da021 100644
--- a/data/man/gallery-dl.1
+++ b/data/man/gallery-dl.1
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL" "1" "2021-05-30" "1.17.5" "gallery-dl Manual"
+.TH "GALLERY-DL" "1" "2021-06-19" "1.18.0" "gallery-dl Manual"
.\" disable hyphenation
.nh
@@ -36,7 +36,7 @@ File to load additional cookies from
Use the specified proxy
.TP
.B "\-\-clear\-cache" \f[I]MODULE\f[]
-Delete all cached login sessions, cookies, etc.
+Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)
.TP
.B "\-q, \-\-quiet"
Activate quiet mode
@@ -83,9 +83,6 @@ Maximum download rate (e.g. 500k or 2.5M)
.B "\-R, \-\-retries" \f[I]N\f[]
Maximum number of retries for failed HTTP requests or -1 for infinite retries (default: 4)
.TP
-.B "\-A, \-\-abort" \f[I]N\f[]
-Abort extractor run after N consecutive file downloads have been skipped, e.g. if files with the same filename already exist
-.TP
.B "\-\-http\-timeout" \f[I]SECONDS\f[]
Timeout for HTTP connections (default: 30.0)
.TP
@@ -132,7 +129,13 @@ Password belonging to the given username
Enable .netrc authentication data
.TP
.B "\-\-download\-archive" \f[I]FILE\f[]
-Record all downloaded files in the archive file and skip downloading any file already in it.
+Record all downloaded files in the archive file and skip downloading any file already in it
+.TP
+.B "\-A, \-\-abort" \f[I]N\f[]
+Stop current extractor run after N consecutive file downloads were skipped
+.TP
+.B "\-T, \-\-terminate" \f[I]N\f[]
+Stop current and parent extractor runs after N consecutive file downloads were skipped
.TP
.B "\-\-range" \f[I]RANGE\f[]
Index-range(s) specifying which images to download. For example '5-10' or '1,3-5,10-'
@@ -169,6 +172,9 @@ Execute CMD for each downloaded file. Example: --exec 'convert {} {}.png && rm {}'
.TP
.B "\-\-exec\-after" \f[I]CMD\f[]
Execute CMD after all files were downloaded successfully. Example: --exec-after 'cd {} && convert * ../doc.pdf'
+.TP
+.B "\-P, \-\-postprocessor" \f[I]NAME\f[]
+Activate the specified post processor
.SH EXAMPLES
.TP
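Per the reworded --clear-cache entry, the option now operates on a single module unless ALL is given. For example (the module name is illustrative):

    gallery-dl --clear-cache pixiv   # delete cached sessions/cookies for the pixiv module only
    gallery-dl --clear-cache ALL     # old behaviour: delete the entire cache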
diff --git a/data/man/gallery-dl.conf.5 b/data/man/gallery-dl.conf.5
index f35f218..84e8e0e 100644
--- a/data/man/gallery-dl.conf.5
+++ b/data/man/gallery-dl.conf.5
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL.CONF" "5" "2021-05-30" "1.17.5" "gallery-dl Manual"
+.TH "GALLERY-DL.CONF" "5" "2021-06-19" "1.18.0" "gallery-dl Manual"
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
@@ -75,14 +75,31 @@ those as makeshift comments by setting their values to arbitrary strings.
.SH EXTRACTOR OPTIONS
.SS extractor.*.filename
.IP "Type:" 6
-\f[I]string\f[]
+\f[I]string\f[] or \f[I]object\f[]
.IP "Example:" 4
+.br
+* .. code::
+
"{manga}_c{chapter}_{page:>03}.{extension}"
+.br
+* .. code:: json
+
+{
+"extension == 'mp4'": "{id}_video.{extension}",
+"'nature' in title" : "{id}_{title}.{extension}",
+"" : "{id}_default.{extension}"
+}
+
+
.IP "Description:" 4
-A \f[I]format string\f[] to build the resulting filename
-for a downloaded file.
+A \f[I]format string\f[] to build filenames for downloaded files with.
+
+If this is an \f[I]object\f[], it must contain Python expressions mapping to the
+filename format strings to use.
+These expressions are evaluated in the order as specified in Python 3.6+
+and in an undetermined order in Python 3.4 and 3.5.
The available replacement keys depend on the extractor used. A list
of keys for a specific one can be acquired by calling *gallery-dl*
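In an actual configuration file the object form documented above would sit under the extractor options. A minimal sketch, assuming the file is saved as /tmp/gdl-filename.json (name and URL are arbitrary) and reusing the expressions from the example in this hunk:

    {
        "extractor": {
            "filename": {
                "extension == 'mp4'": "{id}_video.{extension}",
                "'nature' in title" : "{id}_{title}.{extension}",
                ""                  : "{id}_default.{extension}"
            }
        }
    }

    gallery-dl --config /tmp/gdl-filename.json "https://example.org/gallery/12345"

The first expression that evaluates to true selects the format string; the empty "" key acts as the fallback.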
@@ -358,9 +375,9 @@ and optional for
.br
* \f[I]aryion\f[]
.br
-* \f[I]danbooru\f[]
+* \f[I]danbooru\f[] (*)
.br
-* \f[I]e621\f[]
+* \f[I]e621\f[] (*)
.br
* \f[I]exhentai\f[]
.br
@@ -372,6 +389,8 @@ and optional for
.br
* \f[I]instagram\f[]
.br
+* \f[I]mangadex\f[]
+.br
* \f[I]mangoxo\f[]
.br
* \f[I]pillowfort\f[]
@@ -392,7 +411,7 @@ These values can also be specified via the
\f[I]-u/--username\f[] and \f[I]-p/--password\f[] command-line options or
by using a \f[I].netrc\f[] file. (see Authentication_)
-Note: The password value for \f[I]danbooru\f[] and \f[I]e621\f[] should be
+(*) The password value for \f[I]danbooru\f[] and \f[I]e621\f[] should be
the API key found in your user profile, not the actual account password.
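The (*) marker added above means that for danbooru and e621 the API key from the user profile takes the place of the password. On the command line that would look like this (username, key, and URL are placeholders):

    gallery-dl -u my-user -p 1a2b3c4d-api-key "https://danbooru.donmai.us/posts?tags=landscape"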
@@ -1900,7 +1919,7 @@ Fetch media from all Tweets and replies in a \f[I]conversation
\f[I]bool\f[]
.IP "Default:" 9
-\f[I]true\f[]
+\f[I]false\f[]
.IP "Description:" 4
Fetch media from quoted Tweets.
@@ -1922,7 +1941,7 @@ Fetch media from replies to other Tweets.
\f[I]bool\f[]
.IP "Default:" 9
-\f[I]true\f[]
+\f[I]false\f[]
.IP "Description:" 4
Fetch media from Retweets.
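With both defaults flipped to false, quoted Tweets and Retweets are no longer fetched unless re-enabled. One way to restore the old behaviour for a single run is the generic -o/--option key=value flag, assuming the option names are quoted and retweets as suggested by the descriptions above (the user name is a placeholder):

    gallery-dl -o quoted=true -o retweets=true "https://twitter.com/example_user/media"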
@@ -3206,12 +3225,18 @@ logging output to a file.
"name" : "zip",
"compression": "store",
"extension" : "cbz",
+"filter" : "extension not in ('zip', 'rar')",
"whitelist" : ["mangadex", "exhentai", "nhentai"]
}
.IP "Description:" 4
An \f[I]object\f[] containing a \f[I]"name"\f[] attribute specifying the
post-processor type, as well as any of its \f[I]options\f[].
+
+It is possible to set a \f[I]"filter"\f[] expression similar to
+\f[I]image-filter\f[] to only run a post-processor
+conditionally.
+
It is also possible set a \f[I]"whitelist"\f[] or \f[I]"blacklist"\f[] to
only enable or disable a post-processor for the specified
extractor categories.
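A post-processor entry using the new "filter" key would go into the "postprocessors" list of the extractor options. The sketch below mirrors the example above, trimmed to the parts needed to show conditional execution, and assumes it is saved as /tmp/gdl-zip.json (file path and URL are placeholders):

    {
        "extractor": {
            "postprocessors": [{
                "name"       : "zip",
                "compression": "store",
                "extension"  : "cbz",
                "filter"     : "extension not in ('zip', 'rar')"
            }]
        }
    }

    gallery-dl --config /tmp/gdl-zip.json "https://example.org/gallery/12345"

Here the zip post-processor only runs for files whose extension is not already zip or rar.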