Diffstat (limited to 'gallery_dl/extractor/pixiv.py')
 gallery_dl/extractor/pixiv.py | 50 +++++++++++++++++++++++++++++++++++++++++---------
 1 file changed, 41 insertions(+), 9 deletions(-)
diff --git a/gallery_dl/extractor/pixiv.py b/gallery_dl/extractor/pixiv.py
index 7901149..36fa0fe 100644
--- a/gallery_dl/extractor/pixiv.py
+++ b/gallery_dl/extractor/pixiv.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright 2014-2019 Mike Fährmann
+# Copyright 2014-2020 Mike Fährmann
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License version 2 as
@@ -13,6 +13,7 @@ from .. import text, exception
 from ..cache import cache
 from datetime import datetime, timedelta
 import hashlib
+import time
 
 
 class PixivExtractor(Extractor):
@@ -21,6 +22,7 @@ class PixivExtractor(Extractor):
     directory_fmt = ("{category}", "{user[id]} {user[account]}")
     filename_fmt = "{id}_p{num}.{extension}"
     archive_fmt = "{id}{suffix}.{extension}"
+    cookiedomain = None
 
     def __init__(self, match):
         Extractor.__init__(self, match)
@@ -141,10 +143,11 @@ class PixivMeExtractor(PixivExtractor):
 
     def items(self):
         url = "https://pixiv.me/" + self.account
+        data = {"_extractor": PixivUserExtractor}
         response = self.request(
             url, method="HEAD", allow_redirects=False, notfound="user")
         yield Message.Version, 1
-        yield Message.Queue, response.headers["Location"], {}
+        yield Message.Queue, response.headers["Location"], data
 
 
 class PixivWorkExtractor(PixivExtractor):
@@ -217,6 +220,11 @@ class PixivFavoriteExtractor(PixivExtractor):
("https://www.pixiv.net/bookmark.php", {
"url": "90c1715b07b0d1aad300bce256a0bc71f42540ba",
}),
+ # followed users (#515)
+ ("https://www.pixiv.net/bookmark.php?id=173530&type=user", {
+ "pattern": PixivUserExtractor.pattern,
+ "count": ">= 12",
+ }),
# touch URLs
("https://touch.pixiv.net/bookmark.php?id=173530"),
("https://touch.pixiv.net/bookmark.php"),
@@ -227,6 +235,9 @@ class PixivFavoriteExtractor(PixivExtractor):
         self.query = text.parse_query(match.group(1))
         if "id" not in self.query:
             self.subcategory = "bookmark"
+        elif self.query.get("type") == "user":
+            self.subcategory = "following"
+            self.items = self._items_following
 
     def works(self):
         tag = None
@@ -249,6 +260,15 @@ class PixivFavoriteExtractor(PixivExtractor):
         self.user_id = user["id"]
         return {"user_bookmark": user}
 
+    def _items_following(self):
+        yield Message.Version, 1
+
+        for preview in self.api.user_following(self.query["id"]):
+            user = preview["user"]
+            user["_extractor"] = PixivUserExtractor
+            url = "https://www.pixiv.net/member.php?id={}".format(user["id"])
+            yield Message.Queue, url, user
+
 
 class PixivRankingExtractor(PixivExtractor):
     """Extractor for pixiv ranking pages"""
@@ -493,6 +513,10 @@ class PixivAppAPI():
params = {"user_id": user_id}
return self._call("v1/user/detail", params)["user"]
+ def user_following(self, user_id):
+ params = {"user_id": user_id}
+ return self._pagination("v1/user/following", params, "user_previews")
+
def user_illusts(self, user_id):
params = {"user_id": user_id}
return self._pagination("v1/user/illusts", params)
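
The new user_following() wrapper pages through the app API's v1/user/following endpoint and yields its "user_previews" entries. A rough usage sketch, assuming api is an already authenticated PixivAppAPI instance and that each preview carries the usual "user" object with "id" and "account" fields:

    # api = PixivAppAPI(extractor)  # assumed: instantiated and logged in
    for preview in api.user_following("173530"):
        user = preview["user"]
        print(user["id"], user["account"])  # list followed users
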
@@ -506,17 +530,25 @@ class PixivAppAPI():
 
         self.login()
         response = self.extractor.request(url, params=params, fatal=False)
+        data = response.json()
+
+        if "error" in data:
+            if response.status_code == 404:
+                raise exception.NotFoundError()
+
+            error = data["error"]
+            if "rate limit" in (error.get("message") or "").lower():
+                self.log.info("Waiting two minutes for API rate limit reset.")
+                time.sleep(120)
+                return self._call(endpoint, params)
+            raise exception.StopExtraction("API request failed: %s", error)
 
-        if response.status_code < 400:
-            return response.json()
-        if response.status_code == 404:
-            raise exception.NotFoundError()
-        raise exception.StopExtraction("API request failed: %s", response.text)
+        return data
 
-    def _pagination(self, endpoint, params):
+    def _pagination(self, endpoint, params, key="illusts"):
         while True:
             data = self._call(endpoint, params)
-            yield from data["illusts"]
+            yield from data[key]
 
             if not data["next_url"]:
                 return
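
The reworked _call() decodes the response first and inspects it for an "error" object: a 404 still raises NotFoundError, a rate-limit message triggers a two-minute pause and a retry, and anything else stops extraction. _pagination() gains a key parameter so the same loop can yield either "illusts" (the default) or "user_previews" for the new following endpoint. A condensed sketch of the retry pattern in isolation, with fetch_json() as a hypothetical stand-in for the request-and-decode step:

    import time

    def call_with_retry(fetch_json, endpoint, params):
        # retry after a two-minute pause whenever the API reports a rate limit
        data = fetch_json(endpoint, params)
        error = data.get("error")
        if error and "rate limit" in (error.get("message") or "").lower():
            time.sleep(120)
            return call_with_retry(fetch_json, endpoint, params)
        return data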