| author | 2024-09-07 18:33:19 -0400 |
| committer | 2024-09-07 18:33:19 -0400 |
| commit | 1f3ffe32342852fd9ea9e7704022488f3a1222bd (patch) |
| tree | cb255a091b73e96840de0f6f44b36dff1acab4b9 /data/man |
| parent | b5e56c51e491b41f9eb6a895459c185788a377e5 (diff) |
New upstream version 1.27.4 (upstream/1.27.4)
Diffstat (limited to 'data/man')
| -rw-r--r-- | data/man/gallery-dl.1      |  22 |
| -rw-r--r-- | data/man/gallery-dl.conf.5 | 299 |
2 files changed, 303 insertions, 18 deletions
diff --git a/data/man/gallery-dl.1 b/data/man/gallery-dl.1
index 97af9f9..591daae 100644
--- a/data/man/gallery-dl.1
+++ b/data/man/gallery-dl.1
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL" "1" "2024-08-10" "1.27.3" "gallery-dl Manual"
+.TH "GALLERY-DL" "1" "2024-09-06" "1.27.4" "gallery-dl Manual"
 .\" disable hyphenation
 .nh
@@ -41,13 +41,7 @@ User-Agent request header
 .TP
 .B "\-\-clear\-cache" \f[I]MODULE\f[]
 Delete cached login sessions, cookies, etc. for MODULE (ALL to delete everything)
 .TP
-.B "\-U, \-\-update"
-Update to the latest version
-.TP
-.B "\-\-update\-to" \f[I]CHANNEL[@TAG]\f[]
-Switch to a dfferent release channel (stable or dev) or upgrade/downgrade to a specific version
-.TP
-.B "\-\-update\-check"
+.B "\-U, \-\-update\-check"
 Check if a newer version is available
 .TP
 .B "\-i, \-\-input\-file" \f[I]FILE\f[]
@@ -198,7 +192,7 @@ File to load additional cookies from
 Export session cookies to FILE
 .TP
 .B "\-\-cookies\-from\-browser" \f[I]BROWSER[/DOMAIN][+KEYRING][:PROFILE][::CONTAINER]\f[]
-Name of the browser to load cookies from, with optional domain prefixed with '/', keyring name prefixed with '+', profile prefixed with ':', and container prefixed with '::' ('none' for no container)
+Name of the browser to load cookies from, with optional domain prefixed with '/', keyring name prefixed with '+', profile prefixed with ':', and container prefixed with '::' ('none' for no container (default), 'all' for all containers)
 .TP
 .B "\-A, \-\-abort" \f[I]N\f[]
 Stop current extractor run after N consecutive file downloads were skipped
@@ -254,8 +248,14 @@ Store downloaded files in a CBZ archive
 .B "\-\-mtime" \f[I]NAME\f[]
 Set file modification times according to metadata selected by NAME. Examples: 'date' or 'status[date]'
 .TP
-.B "\-\-ugoira" \f[I]FORMAT\f[]
-Convert Pixiv Ugoira to FORMAT using FFmpeg. Supported formats are 'webm', 'mp4', 'gif', 'vp8', 'vp9', 'vp9-lossless', 'copy'.
+.B "\-\-rename" \f[I]FORMAT\f[]
+Rename previously downloaded files from FORMAT to the current filename format
+.TP
+.B "\-\-rename\-to" \f[I]FORMAT\f[]
+Rename previously downloaded files from the current filename format to FORMAT
+.TP
+.B "\-\-ugoira" \f[I]FMT\f[]
+Convert Pixiv Ugoira to FMT using FFmpeg. Supported formats are 'webm', 'mp4', 'gif', 'vp8', 'vp9', 'vp9-lossless', 'copy'.
 .TP
 .B "\-\-exec" \f[I]CMD\f[]
 Execute CMD for each downloaded file. Supported replacement fields are {} or {_path}, {_directory}, {_filename}. Example: --exec "convert {} {}.png && rm {}"
diff --git a/data/man/gallery-dl.conf.5 b/data/man/gallery-dl.conf.5
index 49c3ec3..e0d75ac 100644
--- a/data/man/gallery-dl.conf.5
+++ b/data/man/gallery-dl.conf.5
@@ -1,4 +1,4 @@
-.TH "GALLERY-DL.CONF" "5" "2024-08-10" "1.27.3" "gallery-dl Manual"
+.TH "GALLERY-DL.CONF" "5" "2024-09-06" "1.27.4" "gallery-dl Manual"
 .\" disable hyphenation
 .nh
 .\" disable justification (adjust text to left margin only)
@@ -1697,6 +1697,22 @@ Sets the maximum depth of returned reply posts.
 Process reposts.
+.SS extractor.bunkr.tlds
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Controls which \f[I]bunkr\f[] TLDs to accept.
+
+.br
+* \f[I]true\f[]: Match URLs with *all* possible TLDs (e.g. \f[I]bunkr.xyz\f[] or \f[I]bunkrrr.duck\f[])
+.br
+* \f[I]false\f[]: Match only URLs with known TLDs
+
+
 .SS extractor.cien.files
 .IP "Type:" 6
 \f[I]list\f[] of \f[I]strings\f[]
@@ -2798,6 +2814,31 @@ Selects which API endpoints to use.
* \f[I]"graphql"\f[]: GraphQL API - lower-resolution media +.SS extractor.instagram.cursor +.IP "Type:" 6 +.br +* \f[I]bool\f[] +.br +* \f[I]string\f[] + +.IP "Default:" 9 +\f[I]true\f[] + +.IP "Example:" 4 +"3414259811154179155_25025320" + +.IP "Description:" 4 +Controls from which position to start the extraction process from. + +.br +* \f[I]true\f[]: Start from the beginning. +Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end. +.br +* \f[I]false\f[]: Start from the beginning. +.br +* any \f[I]string\f[]: Start from the position defined by this value. + + .SS extractor.instagram.include .IP "Type:" 6 .br @@ -2824,11 +2865,23 @@ Possible values are \f[I]"tagged"\f[], \f[I]"stories"\f[], \f[I]"highlights"\f[], +\f[I]"info"\f[], \f[I]"avatar"\f[]. It is possible to use \f[I]"all"\f[] instead of listing all values separately. +.SS extractor.instagram.max-posts +.IP "Type:" 6 +\f[I]integer\f[] + +.IP "Default:" 9 +\f[I]null\f[] + +.IP "Description:" 4 +Limit the number of posts to download. + + .SS extractor.instagram.metadata .IP "Type:" 6 \f[I]bool\f[] @@ -3092,17 +3145,22 @@ to be downloaded as individual image files. .SS extractor.koharu.format .IP "Type:" 6 -\f[I]string\f[] +.br +* \f[I]string\f[] +.br +* \f[I]list\f[] of \f[I]strings\f[] .IP "Default:" 9 -\f[I]"original"\f[] +\f[I]["0", "1600", "1280", "980", "780"]\f[] .IP "Description:" 4 -Name of the image format to download. +Name(s) of the image format to download. -Available formats are +When more than one format is given, the first available one is selected. + +Possible formats are .br -\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[]/\f[I]"original"\f[] +\f[I]"780"\f[], \f[I]"980"\f[], \f[I]"1280"\f[], \f[I]"1600"\f[], \f[I]"0"\f[] (original) .br @@ -4650,6 +4708,33 @@ Controls how to handle Cross Site Request Forgery (CSRF) tokens. * \f[I]"cookies"\f[]: Use token given by the \f[I]ct0\f[] cookie if present. +.SS extractor.twitter.cursor +.IP "Type:" 6 +.br +* \f[I]bool\f[] +.br +* \f[I]string\f[] + +.IP "Default:" 9 +\f[I]true\f[] + +.IP "Example:" 4 +"1/DAABCgABGVKi5lE___oKAAIYbfYNcxrQLggAAwAAAAIAAA" + +.IP "Description:" 4 +Controls from which position to start the extraction process from. + +.br +* \f[I]true\f[]: Start from the beginning. +Log the most recent \f[I]cursor\f[] value when interrupted before reaching the end. +.br +* \f[I]false\f[]: Start from the beginning. +.br +* any \f[I]string\f[]: Start from the position defined by this value. + +Note: A \f[I]cursor\f[] value from one timeline cannot be used with another. + + .SS extractor.twitter.expand .IP "Type:" 6 \f[I]bool\f[] @@ -4702,6 +4787,7 @@ A (comma-separated) list of subcategories to include when processing a user profile. Possible values are +\f[I]"info"\f[], \f[I]"avatar"\f[], \f[I]"background"\f[], \f[I]"timeline"\f[], @@ -5241,6 +5327,19 @@ will be taken from the original posts, not the retweeted posts. Download video files. +.SS extractor.wikimedia.limit +.IP "Type:" 6 +\f[I]integer\f[] + +.IP "Default:" 9 +\f[I]50\f[] + +.IP "Description:" 4 +Number of results to return in a single API query. + +The value must be between 10 and 500. + + .SS extractor.ytdl.cmdline-args .IP "Type:" 6 .br @@ -6372,6 +6471,97 @@ The event(s) for which \f[I]exec.command\f[] is run. See \f[I]metadata.event\f[] for a list of available events. +.SS hash.chunk-size +.IP "Type:" 6 +\f[I]integer\f[] + +.IP "Default:" 9 +\f[I]32768\f[] + +.IP "Description:" 4 +Number of bytes read per chunk during file hash computation. 
+
+
+.SS hash.event
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Default:" 9
+\f[I]"file"\f[]
+
+.IP "Description:" 4
+The event(s) for which \f[I]file hashes\f[] are computed.
+
+See \f[I]metadata.event\f[] for a list of available events.
+
+
+.SS hash.filename
+.IP "Type:" 6
+.br
+* \f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]false\f[]
+
+.IP "Description:" 4
+Rebuild \f[I]filenames\f[] after computing
+\f[I]hash digests\f[] and adding them to the metadata dict.
+
+
+.SS hash.hashes
+.IP "Type:" 6
+.br
+* \f[I]string\f[]
+.br
+* \f[I]object\f[] (field name -> hash algorithm)
+
+.IP "Default:" 9
+\f[I]"md5,sha1"\f[]
+
+.IP "Example:" 4
+.. code:: json
+
+"sha256:hash_sha,sha3_512:hash_sha3"
+
+.. code:: json
+
+{
+"hash_sha" : "sha256",
+"hash_sha3": "sha3_512"
+}
+
+
+.IP "Description:" 4
+Hash digests to compute.
+
+For a list of available hash algorithms, run
+
+.. code::
+
+python -c "import hashlib; print('\\n'.join(hashlib.algorithms_available))"
+
+or see \f[I]python/hashlib\f[].
+
+.br
+* If this is a \f[I]string\f[],
+it is parsed as a a comma-separated list of algorthm-fieldname pairs:
+
+.. code::
+
+[<hash algorithm> ":"] <field name> ["," ...]
+
+When \f[I]<hash algorithm>\f[] is omitted,
+\f[I]<field name>\f[] is used as algorithm name.
+
+.br
+* If this is an \f[I]object\f[],
+it is a \f[I]<field name>\f[] to \f[I]<algorithm name>\f[] mapping
+for hash digests to compute.
+
+
 .SS metadata.mode
 .IP "Type:" 6
 \f[I]string\f[]
@@ -6538,6 +6728,32 @@ e.g. a Tweet on Twitter or a post on Patreon.
 After downloading all files of a post
+.SS metadata.include
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["id", "width", "height", "description"]
+
+.IP "Description:" 4
+Include only the given top-level keys when writing JSON data.
+
+Note: Missing or undefined fields will be silently ignored.
+
+
+.SS metadata.exclude
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]strings\f[]
+
+.IP "Example:" 4
+["blocked", "watching", "status"]
+
+.IP "Description:" 4
+Exclude all given keys from written JSON data.
+
+Note: Cannot be used with \f[I]metadata.include\f[].
+
+
 .SS metadata.fields
 .IP "Type:" 6
 .br
@@ -6847,6 +7063,37 @@ and gets called with the current metadata dict as argument.
 or the \f[I]Path\f[] to a .py file,
+.SS rename.from
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for filenames to rename.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.to
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Description:" 4
+The \f[I]format string\f[] for target filenames.
+
+When no value is given, \f[I]extractor.*.filename\f[] is used.
+
+
+.SS rename.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not rename a file when another file with the target name already exists.
+
+
 .SS ugoira.extension
 .IP "Type:" 6
 \f[I]string\f[]
@@ -7026,6 +7273,30 @@ Allow repeating the last frame when necessary to prevent it
 from only being displayed for a very short amount of time.
+.SS ugoira.skip
+.IP "Type:" 6
+\f[I]bool\f[]
+
+.IP "Default:" 9
+\f[I]true\f[]
+
+.IP "Description:" 4
+Do not convert frames if target file already exists.
+
+
+.SS zip.compression
+.IP "Type:" 6
+\f[I]string\f[]
+
+.IP "Default:" 9
+\f[I]"store"\f[]
+
+.IP "Description:" 4
+Compression method to use when writing the archive.
+
+Possible values are \f[I]"store"\f[], \f[I]"zip"\f[], \f[I]"bzip2"\f[], \f[I]"lzma"\f[].
+
+
 .SS zip.extension
 .IP "Type:" 6
 \f[I]string\f[]
@@ -7191,6 +7462,17 @@ For example, setting this option to \f[I]"#"\f[]
 would allow a replacement operation
 to be \f[I]Rold#new#\f[] instead of the default \f[I]Rold/new/\f[]
+.SS input-files
+.IP "Type:" 6
+\f[I]list\f[] of \f[I]Path\f[]
+
+.IP "Example:" 4
+["~/urls.txt", "$HOME/input"]
+
+.IP "Description:" 4
+Additional input files.
+
+
 .SS signals-ignore
 .IP "Type:" 6
 \f[I]list\f[] of \f[I]strings\f[]
@@ -7575,17 +7857,20 @@ Compare versions of the same file and replace/enumerate them on mismatch
 .br
 \f[I]exec\f[]
 Execute external commands
+\f[I]hash\f[]
+Compute file hash digests
 \f[I]metadata\f[]
 Write metadata to separate files
 \f[I]mtime\f[]
 Set file modification time according to its metadata
 \f[I]python\f[]
 Call Python functions
+\f[I]rename\f[]
+Rename previously downloaded files
 \f[I]ugoira\f[]
 Convert Pixiv Ugoira to WebM using \f[I]ffmpeg\f[]
 \f[I]zip\f[]
 Store files in a ZIP archive
-\f[I]ytdl\f[]
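
The new extractor options documented above (cursor, max-posts, a list-valued koharu.format, bunkr.tlds, wikimedia.limit) are set in gallery-dl.conf like any other extractor option. Below is a minimal sketch assuming the usual gallery-dl.conf nesting of "extractor" -> site -> option; the concrete values are illustrative picks (the Twitter cursor string is the man page's own example), not defaults taken from this diff:

.. code:: json

{
    "extractor": {
        "instagram": {"cursor": true, "max-posts": 100},
        "twitter":   {"cursor": "1/DAABCgABGVKi5lE___oKAAIYbfYNcxrQLggAAwAAAAIAAA", "include": "info,avatar,timeline"},
        "koharu":    {"format": ["0", "1600", "1280"]},
        "bunkr":     {"tlds": false},
        "wikimedia": {"limit": 100}
    }
}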

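Similarly, the new hash and rename post processors and the zip.compression option documented above map onto entries of an extractor.*.postprocessors list. Another minimal sketch, assuming the usual postprocessor layout of objects with a "name" key; the option names come from the diff, while the rename target format string "{category}_{id}.{extension}" is only a hypothetical placeholder:

.. code:: json

{
    "extractor": {
        "postprocessors": [
            {"name": "hash",   "hashes": "md5,sha1", "event": "file", "filename": false},
            {"name": "rename", "to": "{category}_{id}.{extension}", "skip": true},
            {"name": "zip",    "compression": "store"}
        ]
    }
}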