author	Unit 193 <unit193@unit193.net>	2022-03-15 00:19:57 -0400
committer	Unit 193 <unit193@unit193.net>	2022-03-15 00:19:57 -0400
commit	c2e774d3f5a4499b8beb5a12ab46a0099b16b1e7 (patch)
tree	a14107397b5bcb491aa4f4fb3e0feb4582e1879b /gallery_dl/util.py
parent	7900ee4e3692dbd8056c3e47c81bb22eda030b65 (diff)
New upstream version 1.21.0. (upstream/1.21.0)
Diffstat (limited to 'gallery_dl/util.py')
-rw-r--r--	gallery_dl/util.py	34
1 file changed, 33 insertions(+), 1 deletion(-)
diff --git a/gallery_dl/util.py b/gallery_dl/util.py
index bccae2d..92d1620 100644
--- a/gallery_dl/util.py
+++ b/gallery_dl/util.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright 2017-2021 Mike Fährmann
+# Copyright 2017-2022 Mike Fährmann
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -12,6 +12,7 @@ import re
import os
import sys
import json
+import time
import random
import sqlite3
import binascii
@@ -20,6 +21,7 @@ import functools
import itertools
import urllib.parse
from http.cookiejar import Cookie
+from email.utils import mktime_tz, parsedate_tz
from . import text, exception
@@ -272,6 +274,15 @@ def remove_directory(path):
pass
+def set_mtime(path, mtime):
+ try:
+ if isinstance(mtime, str):
+ mtime = mktime_tz(parsedate_tz(mtime))
+ os.utime(path, (time.time(), mtime))
+ except Exception:
+ pass
+
+
def load_cookiestxt(fp):
"""Parse a Netscape cookies.txt file and return a list of its Cookies"""
cookies = []
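
A usage sketch (not part of this commit) for the new set_mtime() helper: it accepts either a numeric Unix timestamp or an RFC 2822 date string such as an HTTP "Last-Modified" header, converts the latter via parsedate_tz/mktime_tz, and silently ignores any failure. The file path below is a hypothetical example.

    # hedged usage sketch; "downloads/image.jpg" is a made-up path
    from gallery_dl import util

    # RFC 2822 date string, e.g. taken from an HTTP "Last-Modified" header
    util.set_mtime("downloads/image.jpg", "Tue, 15 Mar 2022 00:19:57 -0400")

    # a numeric Unix timestamp works as well; the access time is set to "now"
    util.set_mtime("downloads/image.jpg", 1647317997)
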
@@ -413,6 +424,7 @@ GLOBALS = {
"parse_int": text.parse_int,
"urlsplit" : urllib.parse.urlsplit,
"datetime" : datetime.datetime,
+ "timedelta": datetime.timedelta,
"abort" : raises(exception.StopExtraction),
"terminate": raises(exception.TerminateExtraction),
"re" : re,
@@ -510,6 +522,26 @@ def build_extractor_filter(categories, negate=True, special=None):
return lambda extr: any(t(extr) for t in tests)
+def build_proxy_map(proxies, log=None):
+ """Generate a proxy map"""
+ if not proxies:
+ return None
+
+ if isinstance(proxies, str):
+ if "://" not in proxies:
+ proxies = "http://" + proxies.lstrip("/")
+ return {"http": proxies, "https": proxies}
+
+ if isinstance(proxies, dict):
+ for scheme, proxy in proxies.items():
+ if "://" not in proxy:
+ proxies[scheme] = "http://" + proxy.lstrip("/")
+ return proxies
+
+ if log:
+ log.warning("invalid proxy specifier: %s", proxies)
+
+
def build_predicate(predicates):
if not predicates:
return lambda url, kwdict: True
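
A hedged usage sketch of the new build_proxy_map() helper, with made-up proxy addresses: a bare host:port string gains an "http://" scheme and is applied to both protocols, while a dict is normalized per scheme and returned as-is otherwise.

    # hedged sketch; the proxy addresses are illustrative only
    from gallery_dl import util

    print(util.build_proxy_map("127.0.0.1:8080"))
    # {'http': 'http://127.0.0.1:8080', 'https': 'http://127.0.0.1:8080'}

    print(util.build_proxy_map({"https": "socks5://localhost:9050"}))
    # {'https': 'socks5://localhost:9050'}
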