| author | 2020-12-30 18:41:48 -0500 |
|---|---|
| committer | 2020-12-30 18:41:48 -0500 |
| commit | 87a5aa088ce33a1196ff409b76a9ea8233bdc634 (patch) |
| tree | 7e6155edcc5dd12e40b47ad814b3bc69e65c52fc /gallery_dl/extractor/danbooru.py |
| parent | 8f7c87a2697113134c311aaeafd9c919555a2741 (diff) |
New upstream version 1.16.1 (tag: upstream/1.16.1)
Diffstat (limited to 'gallery_dl/extractor/danbooru.py')
| -rw-r--r-- | gallery_dl/extractor/danbooru.py | 38 |
1 file changed, 24 insertions, 14 deletions
```diff
diff --git a/gallery_dl/extractor/danbooru.py b/gallery_dl/extractor/danbooru.py
index ca37cb4..33797f9 100644
--- a/gallery_dl/extractor/danbooru.py
+++ b/gallery_dl/extractor/danbooru.py
@@ -12,7 +12,6 @@
 from .common import Extractor, Message
 from .. import text
 import datetime
-
 BASE_PATTERN = (
     r"(?:https?://)?"
     r"(danbooru|hijiribe|sonohara|safebooru)"
@@ -33,7 +32,6 @@ class DanbooruExtractor(Extractor):
         super().__init__(match)
         self.root = "https://{}.donmai.us".format(match.group(1))
         self.ugoira = self.config("ugoira", False)
-        self.params = {}
 
         username, api_key = self._get_auth_info()
         if username:
@@ -71,13 +69,16 @@ class DanbooruExtractor(Extractor):
             yield Message.Url, url, post
 
     def metadata(self):
-        return {}
+        return ()
 
     def posts(self):
-        return self._pagination(self.root + "/posts.json")
+        return ()
+
+    def _pagination(self, endpoint, params=None, pagenum=False):
+        url = self.root + endpoint
 
-    def _pagination(self, url, pagenum=False):
-        params = self.params.copy()
+        if params is None:
+            params = {}
         params["limit"] = self.per_page
         params["page"] = self.page_start
 
@@ -122,10 +123,14 @@ class DanbooruTagExtractor(DanbooruExtractor):
 
     def __init__(self, match):
         super().__init__(match)
-        self.params["tags"] = text.unquote(match.group(2).replace("+", " "))
+        self.tags = text.unquote(match.group(2).replace("+", " "))
 
     def metadata(self):
-        return {"search_tags": self.params["tags"]}
+        return {"search_tags": self.tags}
+
+    def posts(self):
+        params = {"tags": self.tags}
+        return self._pagination("/posts.json", params)
 
 
 class DanbooruPoolExtractor(DanbooruExtractor):
@@ -141,15 +146,19 @@ class DanbooruPoolExtractor(DanbooruExtractor):
     def __init__(self, match):
         super().__init__(match)
         self.pool_id = match.group(2)
-        self.params["tags"] = "pool:" + self.pool_id
+        self.post_ids = ()
 
     def metadata(self):
         url = "{}/pools/{}.json".format(self.root, self.pool_id)
         pool = self.request(url).json()
         pool["name"] = pool["name"].replace("_", " ")
-        del pool["post_ids"]
+        self.post_ids = pool.pop("post_ids")
         return {"pool": pool}
 
+    def posts(self):
+        params = {"tags": "pool:" + self.pool_id}
+        return self._pagination("/posts.json", params)
+
 
 class DanbooruPostExtractor(DanbooruExtractor):
     """Extractor for single danbooru posts"""
@@ -193,10 +202,9 @@ class DanbooruPopularExtractor(DanbooruExtractor):
 
     def __init__(self, match):
         super().__init__(match)
-        self.params.update(text.parse_query(match.group(2)))
+        self.params = text.parse_query(match.group(2))
 
     def metadata(self):
-        self.page_start = self.page_start or 1
         scale = self.params.get("scale", "day")
         date = self.params.get("date") or datetime.date.today().isoformat()
 
@@ -209,5 +217,7 @@
         return {"date": date, "scale": scale}
 
     def posts(self):
-        url = self.root + "/explore/posts/popular.json"
-        return self._pagination(url, True)
+        if self.page_start is None:
+            self.page_start = 1
+        return self._pagination(
+            "/explore/posts/popular.json", self.params, True)
```
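The central change in this diff is that extractors no longer mutate a shared `self.params` dict in `__init__`; instead, each `posts()` implementation builds its own query parameters and hands them to `_pagination()` along with an endpoint path, and `_pagination()` joins that path onto `self.root` itself. Below is a minimal standalone sketch of that call pattern. The `DanbooruLikeClient` class, the `requests`-based fetch loop, the default values, and the stop condition are illustrative assumptions rather than gallery-dl's actual implementation; only the `_pagination(endpoint, params)` interface mirrors the diff (the `pagenum` flag is omitted here).

```python
# Minimal sketch of the refactored pagination pattern, assuming a plain
# requests-based client. Not gallery-dl's real code: the class name, the
# HTTP loop, and the "fewer results than per_page means last page" stop
# condition are assumptions for illustration only.
import requests


class DanbooruLikeClient:
    def __init__(self, root="https://danbooru.donmai.us",
                 per_page=20, page_start=1):
        self.root = root
        self.per_page = per_page
        self.page_start = page_start

    def _pagination(self, endpoint, params=None):
        # Build the full URL from an endpoint path, as in the new
        # _pagination(endpoint, ...) signature.
        url = self.root + endpoint

        # Each caller supplies its own query parameters instead of
        # mutating a shared self.params dict on the instance.
        if params is None:
            params = {}
        params["limit"] = self.per_page
        params["page"] = self.page_start

        while True:
            posts = requests.get(url, params=params, timeout=30).json()
            yield from posts
            if len(posts) < self.per_page:
                return  # assumed stop condition: short page ends the search
            params["page"] += 1

    def posts_by_tags(self, tags):
        # Mirrors the shape of DanbooruTagExtractor.posts(): parameters
        # are built per call and passed to the shared helper.
        return self._pagination("/posts.json", {"tags": tags})


# Usage sketch: print the ids of the first few posts for a tag search.
if __name__ == "__main__":
    client = DanbooruLikeClient()
    for i, post in enumerate(client.posts_by_tags("scenery")):
        print(post["id"])
        if i >= 4:
            break
```

The benefit of this shape is visible in `posts_by_tags()`: tag, pool, and popular searches can all share one pagination helper without stepping on each other's query state, which is why the diff replaces the per-instance `self.params` with per-call `params` arguments.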
