path: root/gallery_dl/job.py
author    Unit 193 <unit193@unit193.net>  2024-08-03 20:27:44 -0400
committer Unit 193 <unit193@unit193.net>  2024-08-03 20:27:44 -0400
commit    032e5bed275a253e122ed9ac86dac7b8c4204172 (patch)
tree      b4eda52ebfe00c4d22e9d633b1ab2d158a9f0573 /gallery_dl/job.py
parent    80e39a8fc7de105510cbbdca8507f2a4b8c9e01d (diff)
New upstream version 1.27.2 (upstream/1.27.2)
Diffstat (limited to 'gallery_dl/job.py')
-rw-r--r--  gallery_dl/job.py  63
1 file changed, 46 insertions, 17 deletions
diff --git a/gallery_dl/job.py b/gallery_dl/job.py
index 4562b05..0e0916d 100644
--- a/gallery_dl/job.py
+++ b/gallery_dl/job.py
@@ -33,6 +33,7 @@ stdout_write = output.stdout_write
 class Job():
     """Base class for Job types"""
     ulog = None
+    _logger_adapter = output.LoggerAdapter

     def __init__(self, extr, parent=None):
         if isinstance(extr, str):
@@ -77,9 +78,9 @@ class Job():
         actions = extr.config("actions")
         if actions:
-            from .actions import parse
+            from .actions import LoggerAdapter, parse
+            self._logger_adapter = LoggerAdapter
             self._logger_actions = parse(actions)
-            self._wrap_logger = self._wrap_logger_actions

         path_proxy = output.PathfmtProxy(self)
         self._logger_extra = {
@@ -267,10 +268,7 @@ class Job():
         return self._wrap_logger(logging.getLogger(name))

     def _wrap_logger(self, logger):
-        return output.LoggerAdapter(logger, self)
-
-    def _wrap_logger_actions(self, logger):
-        return output.LoggerAdapterActions(logger, self)
+        return self._logger_adapter(logger, self)

     def _write_unsupported(self, url):
         if self.ulog:
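
The three hunks above collapse the former _wrap_logger/_wrap_logger_actions pair into one method that instantiates whatever class is bound to _logger_adapter; assigning the attribute on an instance (as Job.__init__ now does when "actions" are configured) overrides the class-level default. A minimal sketch of that pattern, using stand-in adapter classes rather than the real output.LoggerAdapter / actions.LoggerAdapter:

    import logging

    class LoggerAdapter:
        """Stand-in for output.LoggerAdapter: binds a logger to a job."""
        def __init__(self, logger, job):
            self.logger = logger
            self.job = job

    class ActionsLoggerAdapter(LoggerAdapter):
        """Stand-in for the actions-aware adapter from gallery_dl.actions."""

    class Job:
        _logger_adapter = LoggerAdapter          # class-level default

        def _wrap_logger(self, logger):
            # single code path: instantiate whichever adapter is bound
            return self._logger_adapter(logger, self)

    job = Job()
    print(type(job._wrap_logger(logging.getLogger("job"))))  # LoggerAdapter
    job._logger_adapter = ActionsLoggerAdapter   # per-instance override
    print(type(job._wrap_logger(logging.getLogger("job"))))  # ActionsLoggerAdapter
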
@@ -315,7 +313,7 @@ class DownloadJob(Job):
             pathfmt.build_path()

             if pathfmt.exists():
-                if archive:
+                if archive and self._archive_write_skip:
                     archive.add(kwdict)
                 self.handle_skip()
                 return
@@ -345,7 +343,7 @@ class DownloadJob(Job):
                 return

         if not pathfmt.temppath:
-            if archive:
+            if archive and self._archive_write_skip:
                 archive.add(kwdict)
             self.handle_skip()
             return
@@ -359,7 +357,7 @@ class DownloadJob(Job):
         pathfmt.finalize()
         self.out.success(pathfmt.path)
         self._skipcnt = 0
-        if archive:
+        if archive and self._archive_write_file:
             archive.add(kwdict)
         if "after" in hooks:
             for callback in hooks["after"]:
@@ -561,6 +559,16 @@ class DownloadJob(Job):
             else:
                 extr.log.debug("Using download archive '%s'", archive_path)

+            events = cfg("archive-event")
+            if events is None:
+                self._archive_write_file = True
+                self._archive_write_skip = False
+            else:
+                if isinstance(events, str):
+                    events = events.split(",")
+                self._archive_write_file = ("file" in events)
+                self._archive_write_skip = ("skip" in events)
+
         skip = cfg("skip", True)
         if skip:
             self._skipexc = None
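
For reference, the block added above reads the new "archive-event" option as either a list or a comma-separated string and reduces it to two flags that gate the archive.add() calls in the earlier hunks; with the option unset, only successfully downloaded files are recorded. A small self-contained sketch of the same parsing (parse_archive_events is an illustrative name, not part of gallery_dl):

    def parse_archive_events(events):
        """Return (write_on_file, write_on_skip), mirroring the logic above."""
        if events is None:
            return True, False          # default: record downloads only
        if isinstance(events, str):
            events = events.split(",")  # e.g. "file,skip" -> ["file", "skip"]
        return ("file" in events), ("skip" in events)

    assert parse_archive_events(None) == (True, False)
    assert parse_archive_events("skip") == (False, True)
    assert parse_archive_events(["file", "skip"]) == (True, True)
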
@@ -676,7 +684,7 @@ class SimulationJob(DownloadJob):
kwdict["extension"] = "jpg"
if self.sleep:
self.extractor.sleep(self.sleep(), "download")
- if self.archive:
+ if self.archive and self._archive_write_skip:
self.archive.add(kwdict)
self.out.skip(self.pathfmt.build_filename(kwdict))
@@ -848,16 +856,22 @@ class InfoJob(Job):
 class DataJob(Job):
     """Collect extractor results and dump them"""
+    resolve = False

-    def __init__(self, url, parent=None, file=sys.stdout, ensure_ascii=True):
+    def __init__(self, url, parent=None, file=sys.stdout, ensure_ascii=True,
+                 resolve=False):
         Job.__init__(self, url, parent)
         self.file = file
         self.data = []
         self.ascii = config.get(("output",), "ascii", ensure_ascii)
+        self.resolve = 128 if resolve is True else (resolve or self.resolve)

         private = config.get(("output",), "private")
         self.filter = dict.copy if private else util.filter_dict

+        if self.resolve > 0:
+            self.handle_queue = self.handle_queue_resolve
+
     def run(self):
         self._init()
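
The expression 128 if resolve is True else (resolve or self.resolve) turns resolve=True into a generous depth limit, passes an integer depth through unchanged, and otherwise falls back to the class default of False (no resolving). A tiny demonstration of just that expression, using a hypothetical Demo class:

    class Demo:
        resolve = False  # class default, as in DataJob

        def __init__(self, resolve=False):
            self.resolve = 128 if resolve is True else (resolve or self.resolve)

    assert Demo().resolve is False    # queue messages are kept as-is
    assert Demo(True).resolve == 128  # "resolve everything", depth-limited
    assert Demo(2).resolve == 2       # resolve at most two levels deep
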
@@ -883,12 +897,13 @@ class DataJob(Job):
             for msg in self.data:
                 util.transform_dict(msg[-1], util.number_to_string)

-        # dump to 'file'
-        try:
-            util.dump_json(self.data, self.file, self.ascii, 2)
-            self.file.flush()
-        except Exception:
-            pass
+        if self.file:
+            # dump to 'file'
+            try:
+                util.dump_json(self.data, self.file, self.ascii, 2)
+                self.file.flush()
+            except Exception:
+                pass

         return 0
@@ -900,3 +915,17 @@ class DataJob(Job):
     def handle_queue(self, url, kwdict):
         self.data.append((Message.Queue, url, self.filter(kwdict)))
+
+    def handle_queue_resolve(self, url, kwdict):
+        cls = kwdict.get("_extractor")
+        if cls:
+            extr = cls.from_url(url)
+        else:
+            extr = extractor.find(url)
+
+        if not extr:
+            return self.data.append((Message.Queue, url, self.filter(kwdict)))
+
+        job = self.__class__(extr, self, None, self.ascii, self.resolve-1)
+        job.data = self.data
+        job.run()
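
The new handle_queue_resolve() expands Message.Queue entries instead of recording them: it builds an extractor for the queued URL, runs a nested DataJob with file=None (only the outermost job dumps JSON, thanks to the if self.file guard above) and a decremented resolve depth, and lets the child append into the parent's shared data list; if no extractor matches, the plain queue entry is kept. Assuming a URL that gallery_dl actually supports (the one below is only a placeholder), programmatic use would look roughly like this:

    from gallery_dl import config, job

    config.load()  # read the regular gallery-dl configuration files

    # resolve=1 follows queued child URLs one level deep before dumping;
    # resolve=True would use the 128-level limit set in __init__ above
    j = job.DataJob("https://example.org/gallery/12345", resolve=1)
    j.run()        # collected messages are written to stdout as JSON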