summaryrefslogtreecommitdiffstats
path: root/data/man/gallery-dl.conf.5
diff options
context:
space:
mode:
Diffstat (limited to 'data/man/gallery-dl.conf.5')
-rw-r--r--data/man/gallery-dl.conf.5299
1 files changed, 292 insertions, 7 deletions
diff --git a/data/man/gallery-dl.conf.5 b/data/man/gallery-dl.conf.5
index 49c3ec3..e0d75ac 100644
--- a/data/man/gallery-dl.conf.5
+++ b/data/man/gallery-dl.conf.5
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL.CONF" "5" "2024-08-10" "1.27.3" "gallery-dl Manual"
+.TH "GALLERY-DL.CONF" "5" "2024-09-06" "1.27.4" "gallery-dl Manual"
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
@@ -1697,6 +1697,22 @@ Sets the maximum depth of returned reply posts.
Process reposts.
+.SS extractor.bunkr.tlds
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Controls which \f[I]bunkr\f[] TLDs to accept.
+
+.br
+* \f[I]true\f[]: Match URLs with *all* possible TLDs (e.g. \f[I]bunkr.xyz\f[] or \f[I]bunkrrr.duck\f[])
+.br
+* \f[I]false\f[]: Match only URLs with known TLDs
+
+
.SS extractor.cien.files
.IP "Type:" 6
\f[I]list\f[] of \f[I]strings\f[]
@@ -2798,6 +2814,31 @@ Selects which API endpoints to use.
* \f[I]"graphql"\f[]: GraphQL API - lower-resolution media
+.SS extractor.instagram.cursor
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+.br
+* \f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Example:" 4
+"3414259811154179155_25025320"
+
+.IP "Description:" 4
+Controls from which position to start the extraction process.
+
+.br
+* \f[I]true\f[]: Start from the beginning.
+Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end.
+.br
+* \f[I]false\f[]: Start from the beginning.
+.br
+* any \f[I]string\f[]: Start from the position defined by this value.
+
+
.SS extractor.instagram.include
.IP "Type:" 6
.br
@@ -2824,11 +2865,23 @@ Possible values are
\f[I]"tagged"\f[],
\f[I]"stories"\f[],
\f[I]"highlights"\f[],
+\f[I]"info"\f[],
\f[I]"avatar"\f[].
It is possible to use \f[I]"all"\f[] instead of listing all values separately.
+.SS extractor.instagram.max-posts
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]null\f[]
+
+.IP "Description:" 4
+Limit the number of posts to download.
+
+
.SS extractor.instagram.metadata
.IP "Type:" 6
\f[I]bool\f[]
@@ -3092,17 +3145,22 @@ to be downloaded as individual image files.
.SS extractor.koharu.format
.IP "Type:" 6
-\f[I]string\f[]
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
.IP "Default:" 9
-\f[I]"original"\f[]
+\f[I]["0", "1600", "1280", "980", "780"]\f[]
.IP "Description:" 4
-Name of the image format to download.
+Name(s) of the image format to download.
-Available formats are
+When more than one format is given, the first available one is selected.
+
+Possible formats are
.br
-\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[]/\f[I]"original"\f[]
+\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[] (original)
.br
@@ -4650,6 +4708,33 @@ Controls how to handle Cross Site Request Forgery (CSRF) tokens.
* \f[I]"cookies"\f[]: Use token given by the \f[I]ct0\f[] cookie if present.
+.SS extractor.twitter.cursor
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+.br
+* \f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Example:" 4
+"1/DAABCgABGVKi5lE___oKAAIYbfYNcxrQLggAAwAAAAIAAA"
+
+.IP "Description:" 4
+Controls from which position to start the extraction process.
+
+.br
+* \f[I]true\f[]: Start from the beginning.
+Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end.
+.br
+* \f[I]false\f[]: Start from the beginning.
+.br
+* any \f[I]string\f[]: Start from the position defined by this value.
+
+Note: A \f[I]cursor\f[] value from one timeline cannot be used with another.
+
+
.SS extractor.twitter.expand
.IP "Type:" 6
\f[I]bool\f[]
@@ -4702,6 +4787,7 @@ A (comma-separated) list of subcategories to include
when processing a user profile.
Possible values are
+\f[I]"info"\f[],
\f[I]"avatar"\f[],
\f[I]"background"\f[],
\f[I]"timeline"\f[],
@@ -5241,6 +5327,19 @@ will be taken from the original posts, not the retweeted posts.
Download video files.
+.SS extractor.wikimedia.limit
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]50\f[]
+
+.IP "Description:" 4
+Number of results to return in a single API query.
+
+The value must be between 10 and 500.
+
+
.SS extractor.ytdl.cmdline-args
.IP "Type:" 6
.br
@@ -6372,6 +6471,97 @@ The event(s) for which \f[I]exec.command\f[] is run.
See \f[I]metadata.event\f[] for a list of available events.
+.SS hash.chunk-size
+.IP "Type:" 6
+\f[I]integer\f[]
+
+.IP "Default:" 9
+\f[I]32768\f[]
+
+.IP "Description:" 4
+Number of bytes read per chunk during file hash computation.
+
+
+.SS hash.event
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Default:" 9
+\f[I]"file"\f[]
+
+.IP "Description:" 4
+The event(s) for which \f[I]file hashes\f[] are computed.
+
+See \f[I]metadata.event\f[] for a list of available events.
+
+
+.SS hash.filename
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Rebuild \f[I]filenames\f[] after computing
+\f[I]hash digests\f[] and adding them to the metadata dict.
+
+
+.SS hash.hashes
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]object\f[] (field name -> hash algorithm)
+
+.IP "Default:" 9
+\f[I]"md5,sha1"\f[]
+
+.IP "Example:" 4
+.. code:: json
+
+"sha256:hash_sha,sha3_512:hash_sha3"
+
+.. code:: json
+
+{
+"hash_sha" : "sha256",
+"hash_sha3": "sha3_512"
+}
+
+
+.IP "Description:" 4
+Hash digests to compute.
+
+For a list of available hash algorithms, run
+
+.. code::
+
+python -c "import hashlib; print('\\n'.join(hashlib.algorithms_available))"
+
+or see \f[I]python/hashlib\f[].
+
+.br
+* If this is a \f[I]string\f[],
+it is parsed as a comma-separated list of algorithm-fieldname pairs:
+
+.. code::
+
+[<hash algorithm> ":"] <field name> ["," ...]
+
+When \f[I]<hash algorithm>\f[] is omitted,
+\f[I]<field name>\f[] is used as algorithm name.
+
+.br
+* If this is an \f[I]object\f[],
+it is a \f[I]<field name>\f[] to \f[I]<algorithm name>\f[] mapping
+for hash digests to compute.
+
+
.SS metadata.mode
.IP "Type:" 6
\f[I]string\f[]
@@ -6538,6 +6728,32 @@ e.g. a Tweet on Twitter or a post on Patreon.
After downloading all files of a post
+.SS metadata.include
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["id", "width", "height", "description"]
+
+.IP "Description:" 4
+Include only the given top-level keys when writing JSON data.
+
+Note: Missing or undefined fields will be silently ignored.
+
+
+.SS metadata.exclude
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["blocked", "watching", "status"]
+
+.IP "Description:" 4
+Exclude all given keys from written JSON data.
+
+Note: Cannot be used with \f[I]metadata.include\f[].
+
+
.SS metadata.fields
.IP "Type:" 6
.br
@@ -6847,6 +7063,37 @@ and gets called with the current metadata dict as argument.
or the \f[I]Path\f[] to a .py file,
+.SS rename.from
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for filenames to rename.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.to
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for target filenames.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not rename a file when another file with the target name already exists.
+
+
.SS ugoira.extension
.IP "Type:" 6
\f[I]string\f[]
@@ -7026,6 +7273,30 @@ Allow repeating the last frame when necessary
to prevent it from only being displayed for a very short amount of time.
+.SS ugoira.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not convert frames if the target file already exists.
+
+
+.SS zip.compression
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]"store"\f[]
+
+.IP "Description:" 4
+Compression method to use when writing the archive.
+
+Possible values are \f[I]"store"\f[], \f[I]"zip"\f[], \f[I]"bzip2"\f[], \f[I]"lzma"\f[].
+
+
.SS zip.extension
.IP "Type:" 6
\f[I]string\f[]
@@ -7191,6 +7462,17 @@ For example, setting this option to \f[I]"#"\f[] would allow a replacement
operation to be \f[I]Rold#new#\f[] instead of the default \f[I]Rold/new/\f[]
+.SS input-files
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]Path\f[]
+
+.IP "Example:" 4
+["~/urls.txt", "$HOME/input"]
+
+.IP "Description:" 4
+Additional input files.
+
+
.SS signals-ignore
.IP "Type:" 6
\f[I]list\f[] of \f[I]strings\f[]
@@ -7575,17 +7857,20 @@ Compare versions of the same file and replace/enumerate them on mismatch
.br
\f[I]exec\f[]
Execute external commands
+\f[I]hash\f[]
+Compute file hash digests
\f[I]metadata\f[]
Write metadata to separate files
\f[I]mtime\f[]
Set file modification time according to its metadata
\f[I]python\f[]
Call Python functions
+\f[I]rename\f[]
+Rename previously downloaded files
\f[I]ugoira\f[]
Convert Pixiv Ugoira to WebM using \f[I]ffmpeg\f[]
\f[I]zip\f[]
Store files in a ZIP archive
-\f[I]ytdl\f[]