about summary refs log tree commit diff stats
path: root/gallery_dl/extractor/wikimedia.py
diff options
context:
space:
mode:
author: Unit 193 <unit193@unit193.net> 2025-07-31 01:22:07 -0400
committer: Unit 193 <unit193@unit193.net> 2025-07-31 01:22:07 -0400
commitd9539f96cc7ac112b7d8faad022190fbbc88c745 (patch)
tree471249d60b9202c00d7d82abec8b296fc881292e /gallery_dl/extractor/wikimedia.py
parent889fc15f272118bf277737b6fac29d3faeffc641 (diff)
parenta6e995c093de8aae2e91a0787281bb34c0b871eb (diff)
Update upstream source from tag 'upstream/1.30.2'
Update to upstream version '1.30.2' with Debian dir f0dcd28a671f8600479182ff128e05ba8904a0d8
Diffstat (limited to 'gallery_dl/extractor/wikimedia.py')
-rw-r--r--  gallery_dl/extractor/wikimedia.py  29
1 file changed, 15 insertions, 14 deletions
diff --git a/gallery_dl/extractor/wikimedia.py b/gallery_dl/extractor/wikimedia.py
index 3b23f3a..e927bc1 100644
--- a/gallery_dl/extractor/wikimedia.py
+++ b/gallery_dl/extractor/wikimedia.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright 2022 Ailothaen
-# Copyright 2024 Mike Fährmann
+# Copyright 2024-2025 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -27,14 +27,17 @@ class WikimediaExtractor(BaseExtractor):
if self.category == "wikimedia":
self.category = self.root.split(".")[-2]
elif self.category in ("fandom", "wikigg"):
- self.category = "{}-{}".format(
- self.category, self.root.partition(".")[0].rpartition("/")[2])
+ self.category = (
+ f"{self.category}-"
+ f"{self.root.partition('.')[0].rpartition('/')[2]}")
self.per_page = self.config("limit", 50)
+ if useragent := self.config_instance("useragent"):
+ self.useragent = useragent
+
def _init(self):
- api_path = self.config_instance("api-path")
- if api_path:
+ if api_path := self.config_instance("api-path"):
if api_path[0] == "/":
self.api_url = self.root + api_path
else:
@@ -50,10 +53,9 @@ class WikimediaExtractor(BaseExtractor):
response = self.request(url, method="HEAD", fatal=None)
if response.status_code < 400:
return url
- raise exception.StopExtraction("Unable to find API endpoint")
+ raise exception.AbortExtraction("Unable to find API endpoint")
- @staticmethod
- def prepare(image):
+ def prepare(self, image):
"""Adjust the content of an image object"""
image["metadata"] = {
m["name"]: m["value"]
@@ -107,17 +109,15 @@ class WikimediaExtractor(BaseExtractor):
)
while True:
- data = self.request(url, params=params).json()
+ data = self.request_json(url, params=params)
# ref: https://www.mediawiki.org/wiki/API:Errors_and_warnings
- error = data.get("error")
- if error:
+ if error := data.get("error"):
self.log.error("%s: %s", error["code"], error["info"])
return
# MediaWiki will emit warnings for non-fatal mistakes such as
# invalid parameter instead of raising an error
- warnings = data.get("warnings")
- if warnings:
+ if warnings := data.get("warnings"):
self.log.debug("MediaWiki returned warnings: %s", warnings)
try:
@@ -187,6 +187,7 @@ BASE_PATTERN = WikimediaExtractor.update({
"root": "https://azurlane.koumakan.jp",
"pattern": r"azurlane\.koumakan\.jp",
"api-path": "/w/api.php",
+ "useragent": "Googlebot-Image/1.0",
},
})
@@ -238,7 +239,7 @@ class WikimediaArticleExtractor(WikimediaExtractor):
}
def prepare(self, image):
- WikimediaExtractor.prepare(image)
+ WikimediaExtractor.prepare(self, image)
image["page"] = self.title