author     Unit 193 <unit193@unit193.net>  2024-09-07 18:33:25 -0400
committer  Unit 193 <unit193@unit193.net>  2024-09-07 18:33:25 -0400
commit     05335f2b4f60f6948edc96c71a7ef1c3ca71c9b3 (patch)
tree       2c455afb2e2fcd51788500ce8a3455a1ef659b0e /data
parent     c45c7a86c313075d1fbd5803e7efdda680b27cd7 (diff)
parent     1f3ffe32342852fd9ea9e7704022488f3a1222bd (diff)
Update upstream source from tag 'upstream/1.27.4'
Update to upstream version '1.27.4' with Debian dir 9c7b608ab0b9fa99a0cd692418a8f3965bf3d1c3
Diffstat (limited to 'data')
-rw-r--r--  data/completion/_gallery-dl      10
-rw-r--r--  data/completion/gallery-dl        2
-rw-r--r--  data/completion/gallery-dl.fish  10
-rw-r--r--  data/man/gallery-dl.1            22
-rw-r--r--  data/man/gallery-dl.conf.5      299
5 files changed, 314 insertions, 29 deletions
diff --git a/data/completion/_gallery-dl b/data/completion/_gallery-dl
index 3308e98..1353fa8 100644
--- a/data/completion/_gallery-dl
+++ b/data/completion/_gallery-dl
@@ -13,9 +13,7 @@ _arguments -s -S \
{-X,--extractors}'[Load external extractors from PATH]':'<path>' \
--user-agent'[User-Agent request header]':'<ua>' \
--clear-cache'[Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)]':'<module>' \
-{-U,--update}'[Update to the latest version]' \
---update-to'[Switch to a dfferent release channel (stable or dev) or upgrade/downgrade to a specific version]':'<channel[@tag]>' \
---update-check'[Check if a newer version is available]' \
+{-U,--update-check}'[Check if a newer version is available]' \
{-i,--input-file}'[Download URLs found in FILE ('\''-'\'' for stdin). More than one --input-file can be specified]':'<file>':_files \
{-I,--input-file-comment}'[Download URLs found in FILE. Comment them out after they were downloaded successfully.]':'<file>':_files \
{-x,--input-file-delete}'[Download URLs found in FILE. Delete them after they were downloaded successfully.]':'<file>':_files \
@@ -65,7 +63,7 @@ _arguments -s -S \
--netrc'[Enable .netrc authentication data]' \
{-C,--cookies}'[File to load additional cookies from]':'<file>':_files \
--cookies-export'[Export session cookies to FILE]':'<file>':_files \
---cookies-from-browser'[Name of the browser to load cookies from, with optional domain prefixed with '\''/'\'', keyring name prefixed with '\''+'\'', profile prefixed with '\'':'\'', and container prefixed with '\''::'\'' ('\''none'\'' for no container)]':'<browser[/domain][+keyring][:profile][::container]>' \
+--cookies-from-browser'[Name of the browser to load cookies from, with optional domain prefixed with '\''/'\'', keyring name prefixed with '\''+'\'', profile prefixed with '\'':'\'', and container prefixed with '\''::'\'' ('\''none'\'' for no container (default), '\''all'\'' for all containers)]':'<browser[/domain][+keyring][:profile][::container]>' \
{-A,--abort}'[Stop current extractor run after N consecutive file downloads were skipped]':'<n>' \
{-T,--terminate}'[Stop current and parent extractor runs after N consecutive file downloads were skipped]':'<n>' \
--filesize-min'[Do not download files smaller than SIZE (e.g. 500k or 2.5M)]':'<size>' \
@@ -84,7 +82,9 @@ _arguments -s -S \
--zip'[Store downloaded files in a ZIP archive]' \
--cbz'[Store downloaded files in a CBZ archive]' \
--mtime'[Set file modification times according to metadata selected by NAME. Examples: '\''date'\'' or '\''status\[date\]'\'']':'<name>' \
---ugoira'[Convert Pixiv Ugoira to FORMAT using FFmpeg. Supported formats are '\''webm'\'', '\''mp4'\'', '\''gif'\'', '\''vp8'\'', '\''vp9'\'', '\''vp9-lossless'\'', '\''copy'\''.]':'<format>' \
+--rename'[Rename previously downloaded files from FORMAT to the current filename format]':'<format>' \
+--rename-to'[Rename previously downloaded files from the current filename format to FORMAT]':'<format>' \
+--ugoira'[Convert Pixiv Ugoira to FMT using FFmpeg. Supported formats are '\''webm'\'', '\''mp4'\'', '\''gif'\'', '\''vp8'\'', '\''vp9'\'', '\''vp9-lossless'\'', '\''copy'\''.]':'<fmt>' \
--exec'[Execute CMD for each downloaded file. Supported replacement fields are {} or {_path}, {_directory}, {_filename}. Example: --exec "convert {} {}.png && rm {}"]':'<cmd>' \
--exec-after'[Execute CMD after all files were downloaded. Example: --exec-after "cd {_directory} && convert * ../doc.pdf"]':'<cmd>' && rc=0
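
As a quick illustration of the flags touched above (the URL and format string are placeholders, and the invocations assume this Debian build, where the self-update options are removed and -U is remapped to --update-check):

    # Check whether a newer version is available
    gallery-dl --update-check

    # Load cookies from Firefox, limited to a container named "work"
    gallery-dl --cookies-from-browser firefox::work https://example.org/gallery

    # Rename files matching an older filename format to the current one
    gallery-dl --rename "{id}.{extension}" https://example.org/gallery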
diff --git a/data/completion/gallery-dl b/data/completion/gallery-dl
index 0d933fa..32d9705 100644
--- a/data/completion/gallery-dl
+++ b/data/completion/gallery-dl
@@ -10,7 +10,7 @@ _gallery_dl()
elif [[ "${prev}" =~ ^()$ ]]; then
COMPREPLY=( $(compgen -d -- "${cur}") )
else
- COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --update --update-to --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --resolve-json --simulate --extractor-info --list-keywords --error-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --print-traffic --no-colors --retries --http-timeout --proxy --source-address --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
+ COMPREPLY=( $(compgen -W "--help --version --filename --destination --directory --extractors --user-agent --clear-cache --update-check --input-file --input-file-comment --input-file-delete --no-input --quiet --warning --verbose --get-urls --resolve-urls --dump-json --resolve-json --simulate --extractor-info --list-keywords --error-file --list-modules --list-extractors --write-log --write-unsupported --write-pages --print-traffic --no-colors --retries --http-timeout --proxy --source-address --no-check-certificate --limit-rate --chunk-size --sleep --sleep-request --sleep-extractor --no-part --no-skip --no-mtime --no-download --option --config --config-yaml --config-toml --config-create --config-status --config-open --config-ignore --ignore-config --username --password --netrc --cookies --cookies-export --cookies-from-browser --abort --terminate --filesize-min --filesize-max --download-archive --range --chapter-range --filter --chapter-filter --postprocessor --no-postprocessors --postprocessor-option --write-metadata --write-info-json --write-infojson --write-tags --zip --cbz --mtime --mtime-from-date --rename --rename-to --ugoira --ugoira-conv --ugoira-conv-lossless --ugoira-conv-copy --exec --exec-after" -- "${cur}") )
fi
}
diff --git a/data/completion/gallery-dl.fish b/data/completion/gallery-dl.fish
index 7243998..971ba68 100644
--- a/data/completion/gallery-dl.fish
+++ b/data/completion/gallery-dl.fish
@@ -7,9 +7,7 @@ complete -c gallery-dl -x -a '(__fish_complete_directories)' -s 'D' -l 'director
complete -c gallery-dl -x -a '(__fish_complete_directories)' -s 'X' -l 'extractors' -d 'Load external extractors from PATH'
complete -c gallery-dl -x -l 'user-agent' -d 'User-Agent request header'
complete -c gallery-dl -x -l 'clear-cache' -d 'Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)'
-complete -c gallery-dl -s 'U' -l 'update' -d 'Update to the latest version'
-complete -c gallery-dl -x -l 'update-to' -d 'Switch to a dfferent release channel (stable or dev) or upgrade/downgrade to a specific version'
-complete -c gallery-dl -l 'update-check' -d 'Check if a newer version is available'
+complete -c gallery-dl -s 'U' -l 'update-check' -d 'Check if a newer version is available'
complete -c gallery-dl -r -F -s 'i' -l 'input-file' -d 'Download URLs found in FILE ("-" for stdin). More than one --input-file can be specified'
complete -c gallery-dl -r -F -s 'I' -l 'input-file-comment' -d 'Download URLs found in FILE. Comment them out after they were downloaded successfully.'
complete -c gallery-dl -r -F -s 'x' -l 'input-file-delete' -d 'Download URLs found in FILE. Delete them after they were downloaded successfully.'
@@ -60,7 +58,7 @@ complete -c gallery-dl -x -s 'p' -l 'password' -d 'Password belonging to the giv
complete -c gallery-dl -l 'netrc' -d 'Enable .netrc authentication data'
complete -c gallery-dl -r -F -s 'C' -l 'cookies' -d 'File to load additional cookies from'
complete -c gallery-dl -r -F -l 'cookies-export' -d 'Export session cookies to FILE'
-complete -c gallery-dl -x -l 'cookies-from-browser' -d 'Name of the browser to load cookies from, with optional domain prefixed with "/", keyring name prefixed with "+", profile prefixed with ":", and container prefixed with "::" ("none" for no container)'
+complete -c gallery-dl -x -l 'cookies-from-browser' -d 'Name of the browser to load cookies from, with optional domain prefixed with "/", keyring name prefixed with "+", profile prefixed with ":", and container prefixed with "::" ("none" for no container (default), "all" for all containers)'
complete -c gallery-dl -x -s 'A' -l 'abort' -d 'Stop current extractor run after N consecutive file downloads were skipped'
complete -c gallery-dl -x -s 'T' -l 'terminate' -d 'Stop current and parent extractor runs after N consecutive file downloads were skipped'
complete -c gallery-dl -x -l 'filesize-min' -d 'Do not download files smaller than SIZE (e.g. 500k or 2.5M)'
@@ -81,7 +79,9 @@ complete -c gallery-dl -l 'zip' -d 'Store downloaded files in a ZIP archive'
complete -c gallery-dl -l 'cbz' -d 'Store downloaded files in a CBZ archive'
complete -c gallery-dl -x -l 'mtime' -d 'Set file modification times according to metadata selected by NAME. Examples: "date" or "status[date]"'
complete -c gallery-dl -l 'mtime-from-date' -d '==SUPPRESS=='
-complete -c gallery-dl -x -l 'ugoira' -d 'Convert Pixiv Ugoira to FORMAT using FFmpeg. Supported formats are "webm", "mp4", "gif", "vp8", "vp9", "vp9-lossless", "copy".'
+complete -c gallery-dl -x -l 'rename' -d 'Rename previously downloaded files from FORMAT to the current filename format'
+complete -c gallery-dl -x -l 'rename-to' -d 'Rename previously downloaded files from the current filename format to FORMAT'
+complete -c gallery-dl -x -l 'ugoira' -d 'Convert Pixiv Ugoira to FMT using FFmpeg. Supported formats are "webm", "mp4", "gif", "vp8", "vp9", "vp9-lossless", "copy".'
complete -c gallery-dl -l 'ugoira-conv' -d '==SUPPRESS=='
complete -c gallery-dl -l 'ugoira-conv-lossless' -d '==SUPPRESS=='
complete -c gallery-dl -l 'ugoira-conv-copy' -d '==SUPPRESS=='
diff --git a/data/man/gallery-dl.1 b/data/man/gallery-dl.1
index 97af9f9..591daae 100644
--- a/data/man/gallery-dl.1
+++ b/data/man/gallery-dl.1
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL" "1" "2024-08-10" "1.27.3" "gallery-dl Manual"
+.TH "GALLERY-DL" "1" "2024-09-06" "1.27.4" "gallery-dl Manual"
.\" disable hyphenation
.nh
@@ -41,13 +41,7 @@ User-Agent request header
.B "\-\-clear\-cache" \f[I]MODULE\f[]
Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)
.TP
-.B "\-U, \-\-update"
-Update to the latest version
-.TP
-.B "\-\-update\-to" \f[I]CHANNEL[@TAG]\f[]
-Switch to a dfferent release channel (stable or dev) or upgrade/downgrade to a specific version
-.TP
-.B "\-\-update\-check"
+.B "\-U, \-\-update\-check"
Check if a newer version is available
.TP
.B "\-i, \-\-input\-file" \f[I]FILE\f[]
@@ -198,7 +192,7 @@ File to load additional cookies from
Export session cookies to FILE
.TP
.B "\-\-cookies\-from\-browser" \f[I]BROWSER[/DOMAIN][+KEYRING][:PROFILE][::CONTAINER]\f[]
-Name of the browser to load cookies from, with optional domain prefixed with '/', keyring name prefixed with '+', profile prefixed with ':', and container prefixed with '::' ('none' for no container)
+Name of the browser to load cookies from, with optional domain prefixed with '/', keyring name prefixed with '+', profile prefixed with ':', and container prefixed with '::' ('none' for no container (default), 'all' for all containers)
.TP
.B "\-A, \-\-abort" \f[I]N\f[]
Stop current extractor run after N consecutive file downloads were skipped
@@ -254,8 +248,14 @@ Store downloaded files in a CBZ archive
.B "\-\-mtime" \f[I]NAME\f[]
Set file modification times according to metadata selected by NAME. Examples: 'date' or 'status[date]'
.TP
-.B "\-\-ugoira" \f[I]FORMAT\f[]
-Convert Pixiv Ugoira to FORMAT using FFmpeg. Supported formats are 'webm', 'mp4', 'gif', 'vp8', 'vp9', 'vp9-lossless', 'copy'.
+.B "\-\-rename" \f[I]FORMAT\f[]
+Rename previously downloaded files from FORMAT to the current filename format
+.TP
+.B "\-\-rename\-to" \f[I]FORMAT\f[]
+Rename previously downloaded files from the current filename format to FORMAT
+.TP
+.B "\-\-ugoira" \f[I]FMT\f[]
+Convert Pixiv Ugoira to FMT using FFmpeg. Supported formats are 'webm', 'mp4', 'gif', 'vp8', 'vp9', 'vp9-lossless', 'copy'.
.TP
.B "\-\-exec" \f[I]CMD\f[]
Execute CMD for each downloaded file. Supported replacement fields are {} or {_path}, {_directory}, {_filename}. Example: --exec "convert {} {}.png && rm {}"
diff --git a/data/man/gallery-dl.conf.5 b/data/man/gallery-dl.conf.5
index 49c3ec3..e0d75ac 100644
--- a/data/man/gallery-dl.conf.5
+++ b/data/man/gallery-dl.conf.5
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL.CONF" "5" "2024-08-10" "1.27.3" "gallery-dl Manual"
+.TH "GALLERY-DL.CONF" "5" "2024-09-06" "1.27.4" "gallery-dl Manual"
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
@@ -1697,6 +1697,22 @@ Sets the maximum depth of returned reply posts.
Process reposts.
+.SS extractor.bunkr.tlds
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Controls which \f[I]bunkr\f[] TLDs to accept.
+
+.br
+* \f[I]true\f[]: Match URLs with *all* possible TLDs (e.g. \f[I]bunkr.xyz\f[] or \f[I]bunkrrr.duck\f[])
+.br
+* \f[I]false\f[]: Match only URLs with known TLDs
+
+
.SS extractor.cien.files
.IP "Type:" 6
\f[I]list\f[] of \f[I]strings\f[]
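
For reference, the new extractor.bunkr.tlds option would be enabled in a config file roughly like this (a minimal sketch; ~/.config/gallery-dl/config.json is the conventional location):

    {
        "extractor": {
            "bunkr": {
                "tlds": true
            }
        }
    }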
@@ -2798,6 +2814,31 @@ Selects which API endpoints to use.
* \f[I]"graphql"\f[]: GraphQL API - lower-resolution media
+.SS extractor.instagram.cursor
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+.br
+* \f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Example:" 4
+"3414259811154179155_25025320"
+
+.IP "Description:" 4
+Controls from which position to start the extraction process.
+
+.br
+* \f[I]true\f[]: Start from the beginning.
+Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end.
+.br
+* \f[I]false\f[]: Start from the beginning.
+.br
+* any \f[I]string\f[]: Start from the position defined by this value.
+
+
.SS extractor.instagram.include
.IP "Type:" 6
.br
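
Resuming an interrupted Instagram run with the new cursor option might look like this (a sketch reusing the example value from the entry above):

    {
        "extractor": {
            "instagram": {
                "cursor": "3414259811154179155_25025320"
            }
        }
    }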
@@ -2824,11 +2865,23 @@ Possible values are
\f[I]"tagged"\f[],
\f[I]"stories"\f[],
\f[I]"highlights"\f[],
+\f[I]"info"\f[],
\f[I]"avatar"\f[].
It is possible to use \f[I]"all"\f[] instead of listing all values separately.
+.SS extractor.instagram.max-posts
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]null\f[]
+
+.IP "Description:" 4
+Limit the number of posts to download.
+
+
.SS extractor.instagram.metadata
.IP "Type:" 6
\f[I]bool\f[]
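
Combined with the new "info" subcategory and max-posts limit, a hedged config sketch (the limit of 100 is arbitrary):

    {
        "extractor": {
            "instagram": {
                "include": ["info", "avatar", "stories"],
                "max-posts": 100
            }
        }
    }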
@@ -3092,17 +3145,22 @@ to be downloaded as individual image files.
.SS extractor.koharu.format
.IP "Type:" 6
-\f[I]string\f[]
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
.IP "Default:" 9
-\f[I]"original"\f[]
+\f[I]["0", "1600", "1280", "980", "780"]\f[]
.IP "Description:" 4
-Name of the image format to download.
+Name(s) of the image format to download.
-Available formats are
+When more than one format is given, the first available one is selected.
+
+Possible formats are
.br
-\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[]/\f[I]"original"\f[]
+\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[] (original)
.br
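
Since koharu.format now accepts a list that is tried in order, a sketch preferring 1600px, then 1280px, then the original:

    {
        "extractor": {
            "koharu": {
                "format": ["1600", "1280", "0"]
            }
        }
    }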
@@ -4650,6 +4708,33 @@ Controls how to handle Cross Site Request Forgery (CSRF) tokens.
* \f[I]"cookies"\f[]: Use token given by the \f[I]ct0\f[] cookie if present.
+.SS extractor.twitter.cursor
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+.br
+* \f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Example:" 4
+"1/DAABCgABGVKi5lE___oKAAIYbfYNcxrQLggAAwAAAAIAAA"
+
+.IP "Description:" 4
+Controls from which position to start the extraction process.
+
+.br
+* \f[I]true\f[]: Start from the beginning.
+Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end.
+.br
+* \f[I]false\f[]: Start from the beginning.
+.br
+* any \f[I]string\f[]: Start from the position defined by this value.
+
+Note: A \f[I]cursor\f[] value from one timeline cannot be used with another.
+
+
.SS extractor.twitter.expand
.IP "Type:" 6
\f[I]bool\f[]
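
The Twitter variant follows the same pattern; note the caveat above that a cursor value is only valid for the timeline it came from (value taken from the example in the entry):

    {
        "extractor": {
            "twitter": {
                "cursor": "1/DAABCgABGVKi5lE___oKAAIYbfYNcxrQLggAAwAAAAIAAA"
            }
        }
    }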
@@ -4702,6 +4787,7 @@ A (comma-separated) list of subcategories to include
when processing a user profile.
Possible values are
+\f[I]"info"\f[],
\f[I]"avatar"\f[],
\f[I]"background"\f[],
\f[I]"timeline"\f[],
@@ -5241,6 +5327,19 @@ will be taken from the original posts, not the retweeted posts.
Download video files.
+.SS extractor.wikimedia.limit
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]50\f[]
+
+.IP "Description:" 4
+Number of results to return in a single API query.
+
+The value must be between 10 and 500.
+
+
.SS extractor.ytdl.cmdline-args
.IP "Type:" 6
.br
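
A sketch raising the new wikimedia.limit option to its documented maximum:

    {
        "extractor": {
            "wikimedia": {
                "limit": 500
            }
        }
    }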
@@ -6372,6 +6471,97 @@ The event(s) for which \f[I]exec.command\f[] is run.
See \f[I]metadata.event\f[] for a list of available events.
+.SS hash.chunk-size
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]32768\f[]
+
+.IP "Description:" 4
+Number of bytes read per chunk during file hash computation.
+
+
+.SS hash.event
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Default:" 9
+\f[I]"file"\f[]
+
+.IP "Description:" 4
+The event(s) for which \f[I]file hashes\f[] are computed.
+
+See \f[I]metadata.event\f[] for a list of available events.
+
+
+.SS hash.filename
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Rebuild \f[I]filenames\f[] after computing
+\f[I]hash digests\f[] and adding them to the metadata dict.
+
+
+.SS hash.hashes
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]object\f[] (field name -> hash algorithm)
+
+.IP "Default:" 9
+\f[I]"md5,sha1"\f[]
+
+.IP "Example:" 4
+.. code:: json
+
+"sha256:hash_sha,sha3_512:hash_sha3"
+
+.. code:: json
+
+{
+"hash_sha" : "sha256",
+"hash_sha3": "sha3_512"
+}
+
+
+.IP "Description:" 4
+Hash digests to compute.
+
+For a list of available hash algorithms, run
+
+.. code::
+
+python -c "import hashlib; print('\\n'.join(hashlib.algorithms_available))"
+
+or see \f[I]python/hashlib\f[].
+
+.br
+* If this is a \f[I]string\f[],
+it is parsed as a comma-separated list of algorithm-fieldname pairs:
+
+.. code::
+
+[<hash algorithm> ":"] <field name> ["," ...]
+
+When \f[I]<hash algorithm>\f[] is omitted,
+\f[I]<field name>\f[] is used as algorithm name.
+
+.br
+* If this is an \f[I]object\f[],
+it is a \f[I]<field name>\f[] to \f[I]<algorithm name>\f[] mapping
+for hash digests to compute.
+
+
.SS metadata.mode
.IP "Type:" 6
\f[I]string\f[]
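
Tying the new hash.* options together, an extractor-level postprocessor entry might look like this (a hedged sketch; option keys inside a postprocessor object drop the "hash." prefix, per the usual gallery-dl convention, and hash_sha is an arbitrary field name):

    {
        "extractor": {
            "postprocessors": [
                {
                    "name": "hash",
                    "hashes": "sha256:hash_sha,md5",
                    "filename": true,
                    "event": "file",
                    "chunk-size": 65536
                }
            ]
        }
    }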
@@ -6538,6 +6728,32 @@ e.g. a Tweet on Twitter or a post on Patreon.
After downloading all files of a post
+.SS metadata.include
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["id", "width", "height", "description"]
+
+.IP "Description:" 4
+Include only the given top-level keys when writing JSON data.
+
+Note: Missing or undefined fields will be silently ignored.
+
+
+.SS metadata.exclude
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["blocked", "watching", "status"]
+
+.IP "Description:" 4
+Exclude all given keys from written JSON data.
+
+Note: Cannot be used with \f[I]metadata.include\f[].
+
+
.SS metadata.fields
.IP "Type:" 6
.br
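
A sketch of the new include option on a metadata postprocessor, reusing the example keys from above (include and exclude cannot be combined, so only one is set):

    {
        "extractor": {
            "postprocessors": [
                {
                    "name": "metadata",
                    "mode": "json",
                    "include": ["id", "width", "height", "description"]
                }
            ]
        }
    }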
@@ -6847,6 +7063,37 @@ and gets called with the current metadata dict as argument.
or the \f[I]Path\f[] to a .py file,
+.SS rename.from
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for filenames to rename.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.to
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for target filenames.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not rename a file when another file with the target name already exists.
+
+
.SS ugoira.extension
.IP "Type:" 6
\f[I]string\f[]
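
A sketch of the new rename postprocessor (both format strings are hypothetical; when from or to is omitted, the current extractor.*.filename is used in its place):

    {
        "extractor": {
            "postprocessors": [
                {
                    "name": "rename",
                    "from": "{id}.{extension}",
                    "to": "{category}_{id}.{extension}",
                    "skip": true
                }
            ]
        }
    }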
@@ -7026,6 +7273,30 @@ Allow repeating the last frame when necessary
to prevent it from only being displayed for a very short amount of time.
+.SS ugoira.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not convert frames if target file already exists.
+
+
+.SS zip.compression
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]"store"\f[]
+
+.IP "Description:" 4
+Compression method to use when writing the archive.
+
+Possible values are \f[I]"store"\f[], \f[I]"zip"\f[], \f[I]"bzip2"\f[], \f[I]"lzma"\f[].
+
+
.SS zip.extension
.IP "Type:" 6
\f[I]string\f[]
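
A sketch selecting the new zip.compression option on a zip postprocessor:

    {
        "extractor": {
            "postprocessors": [
                {
                    "name": "zip",
                    "compression": "lzma"
                }
            ]
        }
    }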
@@ -7191,6 +7462,17 @@ For example, setting this option to \f[I]"#"\f[] would allow a replacement
operation to be \f[I]Rold#new#\f[] instead of the default \f[I]Rold/new/\f[]
+.SS input-files
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]Path\f[]
+
+.IP "Example:" 4
+["~/urls.txt", "$HOME/input"]
+
+.IP "Description:" 4
+Additional input files.
+
+
.SS signals-ignore
.IP "Type:" 6
\f[I]list\f[] of \f[I]strings\f[]
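
The new top-level input-files option, using the example paths from the entry above:

    {
        "input-files": ["~/urls.txt", "$HOME/input"]
    }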
@@ -7575,17 +7857,20 @@ Compare versions of the same file and replace/enumerate them on mismatch
.br
\f[I]exec\f[]
Execute external commands
+\f[I]hash\f[]
+Compute file hash digests
\f[I]metadata\f[]
Write metadata to separate files
\f[I]mtime\f[]
Set file modification time according to its metadata
\f[I]python\f[]
Call Python functions
+\f[I]rename\f[]
+Rename previously downloaded files
\f[I]ugoira\f[]
Convert Pixiv Ugoira to WebM using \f[I]ffmpeg\f[]
\f[I]zip\f[]
Store files in a ZIP archive
-\f[I]ytdl\f[]