diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0e193b88..fa2d48f9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -10,7 +10,7 @@ ci:
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c  # v6.0.0
+    rev: v6.0.0  # v6.0.0
     hooks:
       - id: check-json
         types: [file]  # override `types: [json]`
@@ -24,43 +24,43 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: 91de51aef61c5f2383f03da25604e3d65a8309e0  # v2.21.1
+    rev: v2.21.1  # v2.21.1
     hooks:
       - id: pyproject-fmt
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: 4b2e70d08cb2ccd26d1fba73588de41c7a5d50b7  # v0.25
+    rev: v0.25  # v0.25
     hooks:
       - id: validate-pyproject
   - repo: https://github.com/sphinx-contrib/sphinx-lint
-    rev: c883505f64b59c3c5c9375191e4ad9f98e727ccd  # v1.0.2
+    rev: v1.0.2  # v1.0.2
     hooks:
       - id: sphinx-lint
         types: [rst]
   - repo: https://github.com/pycqa/isort
-    rev: dac090ce4d9ee313d086e2e89ab1acb8c2664fa1  # 9.0.0a3
+    rev: 9.0.0a3  # 9.0.0a3
     hooks:
       - id: isort
         additional_dependencies: ["toml"]
         entry: isort --profile=black
         name: isort (python)
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: fa505ab9c3e0fedafe1709fd7ac2b5f8996c670d  # 26.3.1
+    rev: 26.3.1  # 26.3.1
     hooks:
      - id: black
   - repo: https://github.com/tonybaloney/perflint
-    rev: 22f831509bc7765ce272ad6fcb99398d86a26a52  # 0.8.1
+    rev: 0.8.1  # 0.8.1
     hooks:
       - id: perflint
         exclude: ^docs/
   - repo: https://github.com/adamchainz/blacken-docs
-    rev: fda77690955e9b63c6687d8806bafd56a526e45f  # 1.20.0
+    rev: 1.20.0  # 1.20.0
     hooks:
       - id: blacken-docs
         args: [--line-length=79]
         additional_dependencies:
           - black
   - repo: https://github.com/codespell-project/codespell
-    rev: 2ccb47ff45ad361a21071a7eedda4c37e6ae8c5a  # v2.4.2
+    rev: v2.4.2  # v2.4.2
     hooks:
       - id: codespell
         args: [--toml pyproject.toml]
diff --git a/docs/data-processing/apis/fastapi/example.rst b/docs/data-processing/apis/fastapi/example.rst
index 85740215..6ea1b316 100644
--- a/docs/data-processing/apis/fastapi/example.rst
+++ b/docs/data-processing/apis/fastapi/example.rst
@@ -16,7 +16,6 @@ Create a file :file:`main.py` with:
 
     from fastapi import FastAPI
 
-
     app = FastAPI()
 
 
@@ -77,7 +76,6 @@ Now we modify the file ``main.py`` to receive a body from a ``PUT`` request:
 
     from fastapi import FastAPI
 
-
     app = FastAPI()
 
 
diff --git a/docs/data-processing/apis/grpc/accounts_pb2.py b/docs/data-processing/apis/grpc/accounts_pb2.py
index d81d3347..3c111ada 100644
--- a/docs/data-processing/apis/grpc/accounts_pb2.py
+++ b/docs/data-processing/apis/grpc/accounts_pb2.py
@@ -292,7 +292,9 @@
 _GETACCOUNTSREQUEST.fields_by_name["account"].message_type = _ACCOUNT
 _GETACCOUNTSRESULT.fields_by_name["account"].message_type = _ACCOUNT
 DESCRIPTOR.message_types_by_name["Account"] = _ACCOUNT
-DESCRIPTOR.message_types_by_name["CreateAccountRequest"] = _CREATEACCOUNTREQUEST
+DESCRIPTOR.message_types_by_name["CreateAccountRequest"] = (
+    _CREATEACCOUNTREQUEST
+)
 DESCRIPTOR.message_types_by_name["CreateAccountResult"] = _CREATEACCOUNTRESULT
 DESCRIPTOR.message_types_by_name["GetAccountsRequest"] = _GETACCOUNTSREQUEST
 DESCRIPTOR.message_types_by_name["GetAccountsResult"] = _GETACCOUNTSRESULT
diff --git a/docs/data-processing/apis/grpc/tests/test_accounts.py b/docs/data-processing/apis/grpc/tests/test_accounts.py
index 77f660b1..0608f451 100644
--- a/docs/data-processing/apis/grpc/tests/test_accounts.py
+++ b/docs/data-processing/apis/grpc/tests/test_accounts.py
@@ -36,7 +36,10 @@
 def test_create_account(grpc_stub):
     request = CreateAccountRequest(account_name=value)
     response = grpc_stub.CreateAccount(request)
-    assert f"{response.account}" == f'account_id: 1{nl}account_name: "test-data"{nl}'
+    assert (
+        f"{response.account}"
+        == f'account_id: 1{nl}account_name: "test-data"{nl}'
+    )
 
 
 def test_get_accounts(grpc_stub):
@@ -64,7 +67,9 @@ def grpc_server(_grpc_server, grpc_addr, my_ssl_key_path, my_ssl_cert_path):
 @pytest.fixture(scope="module")
 def my_channel_ssl_credentials(my_ssl_cert_path):
     # If we're using self-signed certificate it's necessarily to pass root certificate to channel
-    return grpc.ssl_channel_credentials(root_certificates=my_ssl_cert_path.read_bytes())
+    return grpc.ssl_channel_credentials(
+        root_certificates=my_ssl_cert_path.read_bytes()
+    )
 
 
 @pytest.fixture(scope="module")
diff --git a/docs/data-processing/postgresql/sqlalchemy.rst b/docs/data-processing/postgresql/sqlalchemy.rst
index cfd92280..1a817d44 100644
--- a/docs/data-processing/postgresql/sqlalchemy.rst
+++ b/docs/data-processing/postgresql/sqlalchemy.rst
@@ -44,7 +44,6 @@ Database connection
 
     from sqlalchemy import create_engine
 
-
     engine = create_engine("postgresql:///example", echo=True)
 
 Data model
@@ -56,7 +55,6 @@ Data model
 
     from sqlalchemy.ext.declarative import declarative_base
     from sqlalchemy.orm import relationship
 
-
     Base = declarative_base()
 
diff --git a/docs/data-processing/serialisation-formats/toml/index.rst b/docs/data-processing/serialisation-formats/toml/index.rst
index 73162d8f..4a639c9e 100644
--- a/docs/data-processing/serialisation-formats/toml/index.rst
+++ b/docs/data-processing/serialisation-formats/toml/index.rst
@@ -39,7 +39,6 @@ Overview
 
     import toml
 
-
     config = toml.load("pyproject.toml")
 
 .. seealso::
diff --git a/docs/performance/index.rst b/docs/performance/index.rst
index b66c6a2c..f5c5bb7b 100644
--- a/docs/performance/index.rst
+++ b/docs/performance/index.rst
@@ -52,7 +52,6 @@ We can create sample data with:
 
     from sklearn.datasets import make_blobs
 
-
     points, labels_true = make_blobs(
         n_samples=1000, centers=3, random_state=0, cluster_std=0.60
     )
@@ -133,7 +132,6 @@ algorithm:
 
       from sklearn.cluster import KMeans
 
-
      KMeans(10).fit_predict(points)
 
 * `dask_ml.cluster.KMeans
@@ -143,7 +141,6 @@ algorithm:
 
       from dask_ml.cluster import KMeans
 
-
       KMeans(10).fit(points).predict(points)
 
 The best that could be said against these existing solutions is that they could
diff --git a/docs/performance/nb_kmeans.py b/docs/performance/nb_kmeans.py
index 9f4f7ebe..acc14093 100644
--- a/docs/performance/nb_kmeans.py
+++ b/docs/performance/nb_kmeans.py
@@ -41,7 +41,9 @@ def compute_centers(points, labels):
         counts[label] += 1
         centers[label] = [a + b for a, b in zip(centers[label], point)]
 
-    return [[x / count for x in center] for center, count in zip(centers, counts)]
+    return [
+        [x / count for x in center] for center, count in zip(centers, counts)
+    ]
 
 
 def kmeans(points, n_clusters):
diff --git a/docs/performance/np_kmeans.py b/docs/performance/np_kmeans.py
index 5a459800..11583ef5 100644
--- a/docs/performance/np_kmeans.py
+++ b/docs/performance/np_kmeans.py
@@ -24,7 +24,9 @@ def compute_centers(points, labels):
         counts[label] += 1
         centers[label] = [a + b for a, b in zip(centers[label], point)]
 
-    return [[x / count for x in center] for center, count in zip(centers, counts)]
+    return [
+        [x / count for x in center] for center, count in zip(centers, counts)
+    ]
 
 
 def kmeans(points, n_clusters):
diff --git a/docs/performance/py_kmeans.py b/docs/performance/py_kmeans.py
index cd50ade0..5edffbf7 100644
--- a/docs/performance/py_kmeans.py
+++ b/docs/performance/py_kmeans.py
@@ -29,7 +29,9 @@ def compute_centers(points, labels):
         counts[label] += 1
         centers[label] = [a + b for a, b in zip(centers[label], point)]
 
-    return [[x / count for x in center] for center, count in zip(centers, counts)]
+    return [
+        [x / count for x in center] for center, count in zip(centers, counts)
+    ]
 
 
 def kmeans(points, n_clusters):
diff --git a/docs/productive/qa/requests/__init__.py b/docs/productive/qa/requests/__init__.py
index 9765690a..68b982b7 100644
--- a/docs/productive/qa/requests/__init__.py
+++ b/docs/productive/qa/requests/__init__.py
@@ -57,9 +57,13 @@
 chardet_version = None
 
 
-def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
+def check_compatibility(
+    urllib3_version, chardet_version, charset_normalizer_version
+):
     urllib3_version = urllib3_version.split(".")
-    assert urllib3_version != ["dev"]  # Verify urllib3 isn't installed from git.
+    assert urllib3_version != [
+        "dev"
+    ]  # Verify urllib3 isn't installed from git.
 
     # Sometimes, urllib3 only reports its version as 16.1.
     if len(urllib3_version) == 2:
@@ -85,7 +89,9 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
         # charset_normalizer >= 2.0.0 < 3.0.0
         assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
     else:
-        raise Exception("You need either charset_normalizer or chardet installed")
+        raise Exception(
+            "You need either charset_normalizer or chardet installed"
+        )
 
 
 def _check_cryptography(cryptography_version):
@@ -96,8 +102,10 @@ def _check_cryptography(cryptography_version):
         return
 
     if cryptography_version < [1, 3, 4]:
-        warning = "Old version of cryptography ({}) may cause slowdown.".format(
-            cryptography_version
+        warning = (
+            "Old version of cryptography ({}) may cause slowdown.".format(
+                cryptography_version
+            )
         )
         warnings.warn(warning, RequestsDependencyWarning)
 
diff --git a/docs/productive/qa/requests/adapters.py b/docs/productive/qa/requests/adapters.py
index f2a84e94..93e733b7 100644
--- a/docs/productive/qa/requests/adapters.py
+++ b/docs/productive/qa/requests/adapters.py
@@ -23,10 +23,7 @@
     ProtocolError,
 )
 from urllib3.exceptions import ProxyError as _ProxyError
-from urllib3.exceptions import (
-    ReadTimeoutError,
-    ResponseError,
-)
+from urllib3.exceptions import ReadTimeoutError, ResponseError
 from urllib3.exceptions import SSLError as _SSLError
 from urllib3.poolmanager import PoolManager, proxy_from_url
 from urllib3.response import HTTPResponse
@@ -81,7 +78,13 @@ def __init__(self):
         super(BaseAdapter, self).__init__()
 
     def send(
-        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+        self,
+        request,
+        stream=False,
+        timeout=None,
+        verify=True,
+        cert=None,
+        proxies=None,
     ):
         """Sends PreparedRequest object. Returns Response object.
 
@@ -435,12 +438,20 @@ def proxy_headers(self, proxy):
         username, password = get_auth_from_url(proxy)
 
         if username:
-            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+            headers["Proxy-Authorization"] = _basic_auth_str(
+                username, password
+            )
 
         return headers
 
     def send(
-        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+        self,
+        request,
+        stream=False,
+        timeout=None,
+        verify=True,
+        cert=None,
+        proxies=None,
     ):
         """Sends PreparedRequest object. Returns Response object.
 
@@ -474,7 +485,9 @@ def send(
             proxies=proxies,
         )
 
-        chunked = not (request.body is None or "Content-Length" in request.headers)
+        chunked = not (
+            request.body is None or "Content-Length" in request.headers
+        )
 
         if isinstance(timeout, tuple):
             try:
@@ -516,7 +529,9 @@ def send(
             low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
 
             try:
-                low_conn.putrequest(request.method, url, skip_accept_encoding=True)
+                low_conn.putrequest(
+                    request.method, url, skip_accept_encoding=True
+                )
 
                 for header, value in request.headers.items():
                     low_conn.putheader(header, value)
diff --git a/docs/productive/qa/requests/auth.py b/docs/productive/qa/requests/auth.py
index 573dc015..ea46ac1b 100644
--- a/docs/productive/qa/requests/auth.py
+++ b/docs/productive/qa/requests/auth.py
@@ -94,7 +94,9 @@ def __ne__(self, other):
         return not self == other
 
     def __call__(self, r):
-        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
+        r.headers["Authorization"] = _basic_auth_str(
+            self.username, self.password
+        )
         return r
 
 
@@ -102,7 +104,9 @@ class HTTPProxyAuth(HTTPBasicAuth):
     """Attaches HTTP Proxy Authentication to a given Request object."""
 
     def __call__(self, r):
-        r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password)
+        r.headers["Proxy-Authorization"] = _basic_auth_str(
+            self.username, self.password
+        )
         return r
 
 
@@ -220,12 +224,16 @@ def sha512_utf8(x):
         self._thread_local.last_nonce = nonce
 
         # XXX should the partial digests be encoded too?
-        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' 'response="%s"' % (
-            self.username,
-            realm,
-            nonce,
-            path,
-            respdig,
+        base = (
+            'username="%s", realm="%s", nonce="%s", uri="%s", '
+            'response="%s"'
+            % (
+                self.username,
+                realm,
+                nonce,
+                path,
+                respdig,
+            )
         )
         if opaque:
             base += ', opaque="%s"' % opaque
@@ -265,7 +273,9 @@ def handle_401(self, r, **kwargs):
         if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
             self._thread_local.num_401_calls += 1
             pat = re.compile(r"digest ", flags=re.IGNORECASE)
-            self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
+            self._thread_local.chal = parse_dict_header(
+                pat.sub("", s_auth, count=1)
+            )
 
             # Consume content and release the original connection
             # to allow our new request to reuse the same one.
@@ -292,7 +302,9 @@ def __call__(self, r):
         self.init_per_thread_state()
         # If we have a saved nonce, skip the 401
         if self._thread_local.last_nonce:
-            r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
+            r.headers["Authorization"] = self.build_digest_header(
+                r.method, r.url
+            )
         try:
             self._thread_local.pos = r.body.tell()
         except AttributeError:
diff --git a/docs/productive/qa/requests/cookies.py b/docs/productive/qa/requests/cookies.py
index 186290fb..d73a64f7 100644
--- a/docs/productive/qa/requests/cookies.py
+++ b/docs/productive/qa/requests/cookies.py
@@ -130,7 +130,9 @@ def extract_cookies_to_jar(jar, request, response):
     :param request: our own requests.Request object
     :param response: urllib3.HTTPResponse object
     """
-    if not (hasattr(response, "_original_response") and response._original_response):
+    if not (
+        hasattr(response, "_original_response") and response._original_response
+    ):
         return
     # the _original_response field is the wrapped httplib.HTTPResponse object,
     req = MockRequest(request)
@@ -213,7 +215,10 @@ def set(self, name, value, **kwargs):
         # support client code that unsets cookies by assignment of a None value:
         if value is None:
             remove_cookie_by_name(
-                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
+                self,
+                name,
+                domain=kwargs.get("domain"),
+                path=kwargs.get("path"),
             )
             return
 
@@ -355,7 +360,9 @@ def set_cookie(self, cookie, *args, **kwargs):
             and cookie.value.endswith('"')
         ):
             cookie.value = cookie.value.replace('\\"', "")
-        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
+        return super(RequestsCookieJar, self).set_cookie(
+            cookie, *args, **kwargs
+        )
 
     def update(self, other):
         """Updates this jar with cookies from another CookieJar or dict-like"""
@@ -406,7 +413,8 @@ def _find_no_duplicates(self, name, domain=None, path=None):
                     toReturn is not None
                 ):  # if there are multiple cookies that meet passed in criteria
                     raise CookieConflictError(
-                        "There are multiple cookies with name, %r" % (name)
+                        "There are multiple cookies with name, %r"
+                        % (name)
                     )
                 toReturn = (
                     cookie.value
@@ -503,7 +511,9 @@ def morsel_to_cookie(morsel):
             raise TypeError("max-age: %s must be integer" % morsel["max-age"])
     elif morsel["expires"]:
         time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
-        expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
+        expires = calendar.timegm(
+            time.strptime(morsel["expires"], time_template)
+        )
     return create_cookie(
         comment=morsel["comment"],
         comment_url=bool(morsel["comment"]),
@@ -553,7 +563,9 @@ def merge_cookies(cookiejar, cookies):
         raise ValueError("You can only merge into CookieJar")
 
     if isinstance(cookies, dict):
-        cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)
+        cookiejar = cookiejar_from_dict(
+            cookies, cookiejar=cookiejar, overwrite=False
+        )
     elif isinstance(cookies, cookielib.CookieJar):
         try:
             cookiejar.update(cookies)
diff --git a/docs/productive/qa/requests/exceptions.py b/docs/productive/qa/requests/exceptions.py
index eef0fd55..ce4fe7c5 100644
--- a/docs/productive/qa/requests/exceptions.py
+++ b/docs/productive/qa/requests/exceptions.py
@@ -20,7 +20,11 @@ def __init__(self, *args, **kwargs):
         response = kwargs.pop("response", None)
         self.response = response
         self.request = kwargs.pop("request", None)
-        if response is not None and not self.request and hasattr(response, "request"):
+        if (
+            response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
             self.request = self.response.request
         super(RequestException, self).__init__(*args, **kwargs)
 
diff --git a/docs/productive/qa/requests/help.py b/docs/productive/qa/requests/help.py
index 4f065462..a9233273 100644
--- a/docs/productive/qa/requests/help.py
+++ b/docs/productive/qa/requests/help.py
@@ -107,7 +107,9 @@ def info():
     }
 
     system_ssl = ssl.OPENSSL_VERSION_NUMBER
-    system_ssl_info = {"version": "%x" % system_ssl if system_ssl is not None else ""}
+    system_ssl_info = {
+        "version": "%x" % system_ssl if system_ssl is not None else ""
+    }
 
     return {
         "platform": platform_info,
diff --git a/docs/productive/qa/requests/models.py b/docs/productive/qa/requests/models.py
index f8bf7b04..348b2e5c 100644
--- a/docs/productive/qa/requests/models.py
+++ b/docs/productive/qa/requests/models.py
@@ -39,12 +39,7 @@
     is_py2,
 )
 from .compat import json as complexjson
-from .compat import (
-    str,
-    urlencode,
-    urlsplit,
-    urlunparse,
-)
+from .compat import str, urlencode, urlsplit, urlunparse
 from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
 from .exceptions import (
     ChunkedEncodingError,
@@ -223,7 +218,9 @@ def register_hook(self, event, hook):
         if isinstance(hook, Callable):
             self.hooks[event].append(hook)
         elif hasattr(hook, "__iter__"):
-            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
+            self.hooks[event].extend(
+                h for h in hook if isinstance(h, Callable)
+            )
 
     def deregister_hook(self, event, hook):
         """Deregister a previously registered hook.
@@ -445,9 +442,7 @@ def prepare_url(self, url, params):
             raise InvalidURL(*e.args)
 
         if not scheme:
-            error = (
-                "Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?"
-            )
+            error = "Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?"
             error = error.format(to_native_string(url, "utf8"))
             raise MissingSchema(error)
 
@@ -501,7 +496,9 @@ def prepare_url(self, url, params):
         else:
             query = enc_params
 
-        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
+        url = requote_uri(
+            urlunparse([scheme, netloc, path, None, query, fragment])
+        )
         self.url = url
 
     def prepare_headers(self, headers):
@@ -795,7 +792,9 @@ def is_redirect(self):
         """True if this Response is a well-formed HTTP redirect that could have
         been processed automatically (by :meth:`Session.resolve_redirects`).
         """
-        return "location" in self.headers and self.status_code in REDIRECT_STATI
+        return (
+            "location" in self.headers and self.status_code in REDIRECT_STATI
+        )
 
     @property
     def is_permanent_redirect(self):
@@ -836,7 +835,9 @@ def generate():
             # Special case for urllib3.
             if hasattr(self.raw, "stream"):
                 try:
-                    for chunk in self.raw.stream(chunk_size, decode_content=True):
+                    for chunk in self.raw.stream(
+                        chunk_size, decode_content=True
+                    ):
                         yield chunk
                 except ProtocolError as e:
                     raise ChunkedEncodingError(e)
@@ -858,7 +859,8 @@ def generate():
             raise StreamConsumedError()
         elif chunk_size is not None and not isinstance(chunk_size, int):
             raise TypeError(
-                "chunk_size must be an int, it is instead a %s." % type(chunk_size)
+                "chunk_size must be an int, it is instead a %s."
+                % type(chunk_size)
             )
         # simulate reading small chunks of the content
         reused_chunks = iter_slices(self._content, chunk_size)
@@ -913,12 +915,16 @@ def content(self):
         if self._content is False:
             # Read the contents.
             if self._content_consumed:
-                raise RuntimeError("The content for this response was already consumed")
+                raise RuntimeError(
+                    "The content for this response was already consumed"
+                )
 
             if self.status_code == 0 or self.raw is None:
                 self._content = None
             else:
-                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
+                self._content = (
+                    b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""
+                )
 
         self._content_consumed = True
         # don't need to release the connection; that's been handled by urllib3
@@ -983,7 +989,9 @@ def json(self, **kwargs):
             encoding = guess_json_utf(self.content)
             if encoding is not None:
                 try:
-                    return complexjson.loads(self.content.decode(encoding), **kwargs)
+                    return complexjson.loads(
+                        self.content.decode(encoding), **kwargs
+                    )
                 except UnicodeDecodeError:
                     # Wrong UTF codec detected; usually because it's not UTF-8
                     # but some other 8-bit codec. This is an RFC violation,
diff --git a/docs/productive/qa/requests/packages.py b/docs/productive/qa/requests/packages.py
index 6b14db4a..d42d094b 100644
--- a/docs/productive/qa/requests/packages.py
+++ b/docs/productive/qa/requests/packages.py
@@ -7,7 +7,9 @@
 
     import charset_normalizer as chardet
 
-    warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer")
+    warnings.filterwarnings(
+        "ignore", "Trying to detect", module="charset_normalizer"
+    )
 
 # This code exists for backwards compatibility reasons.
 # I don't like it either. Just look the other way. :)
@@ -23,7 +25,7 @@
 target = chardet.__name__
 for mod in list(sys.modules):
     if mod == target or mod.startswith(target + "."):
-        sys.modules["requests.packages." + target.replace(target, "chardet")] = (
-            sys.modules[mod]
-        )
+        sys.modules[
+            "requests.packages." + target.replace(target, "chardet")
+        ] = sys.modules[mod]
 # Kinda cool, though, right?
diff --git a/docs/productive/qa/requests/sessions.py b/docs/productive/qa/requests/sessions.py
index f0598abf..24f634ed 100644
--- a/docs/productive/qa/requests/sessions.py
+++ b/docs/productive/qa/requests/sessions.py
@@ -33,7 +33,12 @@
 from .hooks import default_hooks, dispatch_hook
 
 # formerly defined here, reexposed here for backward compatibility
-from .models import DEFAULT_REDIRECT_LIMIT, REDIRECT_STATI, PreparedRequest, Request
+from .models import (
+    DEFAULT_REDIRECT_LIMIT,
+    REDIRECT_STATI,
+    PreparedRequest,
+    Request,
+)
 from .status_codes import codes
 from .structures import CaseInsensitiveDict
 from .utils import (
@@ -72,7 +77,8 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
 
     # Bypass if not a dictionary (e.g. verify)
     if not (
-        isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping)
+        isinstance(session_setting, Mapping)
+        and isinstance(request_setting, Mapping)
     ):
         return request_setting
 
@@ -190,7 +196,8 @@ def resolve_redirects(
 
             if len(resp.history) >= self.max_redirects:
                 raise TooManyRedirects(
-                    "Exceeded {} redirects.".format(self.max_redirects), response=resp
+                    "Exceeded {} redirects.".format(self.max_redirects),
+                    response=resp,
                 )
 
             # Release the connection back into the pool.
@@ -227,7 +234,11 @@ def resolve_redirects(
                 codes.permanent_redirect,
             ):
                 # https://github.com/psf/requests/issues/3490
-                purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding")
+                purged_headers = (
+                    "Content-Length",
+                    "Content-Type",
+                    "Transfer-Encoding",
+                )
                 for header in purged_headers:
                     prepared_request.headers.pop(header, None)
                 prepared_request.body = None
@@ -274,7 +285,9 @@ def resolve_redirects(
                 **adapter_kwargs,
             )
 
-            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
+            extract_cookies_to_jar(
+                self.cookies, prepared_request, resp.raw
+            )
 
             # extract redirect url, if any, for the next loop
             url = self.get_redirect_target(resp)
@@ -337,7 +350,9 @@ def rebuild_proxies(self, prepared_request, proxies):
                 username, password = None, None
 
         if username and password:
-            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
+            headers["Proxy-Authorization"] = _basic_auth_str(
+                username, password
+            )
 
         return new_proxies
 
@@ -689,7 +704,9 @@ def send(self, request, **kwargs):
         kwargs.setdefault("stream", self.stream)
         kwargs.setdefault("verify", self.verify)
         kwargs.setdefault("cert", self.cert)
-        kwargs.setdefault("proxies", self.rebuild_proxies(request, self.proxies))
+        kwargs.setdefault(
+            "proxies", self.rebuild_proxies(request, self.proxies)
+        )
 
         # It's possible that users might accidentally send a Request object.
         # Guard against that specific failure case.
@@ -745,7 +762,9 @@ def send(self, request, **kwargs):
         if not allow_redirects:
             try:
                 r._next = next(
-                    self.resolve_redirects(r, request, yield_requests=True, **kwargs)
+                    self.resolve_redirects(
+                        r, request, yield_requests=True, **kwargs
+                    )
                 )
             except StopIteration:
                 pass
@@ -772,9 +791,9 @@ def merge_environment_settings(self, url, proxies, stream, verify, cert):
             # Look for requests environment configuration and be compatible
             # with cURL.
             if verify is True or verify is None:
-                verify = os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get(
-                    "CURL_CA_BUNDLE"
-                )
+                verify = os.environ.get(
+                    "REQUESTS_CA_BUNDLE"
+                ) or os.environ.get("CURL_CA_BUNDLE")
 
         # Merge all the kwargs.
         proxies = merge_setting(proxies, self.proxies)
@@ -782,7 +801,12 @@
         verify = merge_setting(verify, self.verify)
         cert = merge_setting(cert, self.cert)
 
-        return {"verify": verify, "proxies": proxies, "stream": stream, "cert": cert}
+        return {
+            "verify": verify,
+            "proxies": proxies,
+            "stream": stream,
+            "cert": cert,
+        }
 
     def get_adapter(self, url):
         """
@@ -795,7 +819,9 @@ def get_adapter(self, url):
                 return adapter
 
         # Nothing matches :-/
-        raise InvalidSchema("No connection adapters were found for {!r}".format(url))
+        raise InvalidSchema(
+            "No connection adapters were found for {!r}".format(url)
+        )
 
     def close(self):
         """Closes all adapters and as such the session"""
diff --git a/docs/productive/qa/requests/status_codes.py b/docs/productive/qa/requests/status_codes.py
index 05ff06f2..b0bfed2c 100644
--- a/docs/productive/qa/requests/status_codes.py
+++ b/docs/productive/qa/requests/status_codes.py
@@ -61,7 +61,11 @@
     404: ("not_found", "-o-"),
     405: ("method_not_allowed", "not_allowed"),
     406: ("not_acceptable",),
-    407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
+    407: (
+        "proxy_authentication_required",
+        "proxy_auth",
+        "proxy_authentication",
+    ),
     408: ("request_timeout", "timeout"),
     409: ("conflict",),
     410: ("gone",),
@@ -102,7 +106,11 @@
     507: ("insufficient_storage",),
     509: ("bandwidth_limit_exceeded", "bandwidth"),
     510: ("not_extended",),
-    511: ("network_authentication_required", "network_auth", "network_authentication"),
+    511: (
+        "network_authentication_required",
+        "network_auth",
+        "network_authentication",
+    ),
 }
 
 codes = LookupDict(name="status_codes")
diff --git a/docs/productive/qa/requests/structures.py b/docs/productive/qa/requests/structures.py
index 8f90ab3b..04323756 100644
--- a/docs/productive/qa/requests/structures.py
+++ b/docs/productive/qa/requests/structures.py
@@ -64,7 +64,9 @@ def __len__(self):
 
     def lower_items(self):
         """Like iteritems(), but with all lowercase keys."""
-        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
+        return (
+            (lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()
+        )
 
     def __eq__(self, other):
         if isinstance(other, Mapping):
diff --git a/docs/productive/qa/requests/utils.py b/docs/productive/qa/requests/utils.py
index 5a26ea85..959061f2 100644
--- a/docs/productive/qa/requests/utils.py
+++ b/docs/productive/qa/requests/utils.py
@@ -86,9 +86,13 @@ def proxy_bypass_registry(host):
                 r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
             )
             # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
-            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
+            proxyEnable = int(
+                winreg.QueryValueEx(internetSettings, "ProxyEnable")[0]
+            )
             # ProxyOverride is almost always a string
-            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
+            proxyOverride = winreg.QueryValueEx(
+                internetSettings, "ProxyOverride"
+            )[0]
         except OSError:
             return False
         if not proxyEnable or not proxyOverride:
@@ -254,7 +258,12 @@ def get_netrc_auth(url, raise_errors=False):
 
 def guess_filename(obj):
     """Tries to guess the filename of the given object."""
     name = getattr(obj, "name", None)
-    if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">":
+    if (
+        name
+        and isinstance(name, basestring)
+        and name[0] != "<"
+        and name[-1] != ">"
+    ):
         return os.path.basename(name)
 
@@ -495,7 +504,9 @@ def get_encodings_from_content(content):
     )
 
    charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
-    pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+    pragma_re = re.compile(
+        r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I
+    )
     xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
 
     return (
@@ -684,7 +695,9 @@ def address_in_network(ip, net):
     """
     ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0]
     netaddr, bits = net.split("/")
-    netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0]
+    netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[
+        0
+    ]
     network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask
     return (ipaddr & netmask) == (network & netmask)
 
@@ -781,7 +794,9 @@ def should_bypass_proxies(url, no_proxy):
     if no_proxy:
         # We need to check whether we match here. We need to see if we match
         # the end of the hostname, both with and without the port.
-        no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)
+        no_proxy = (
+            host for host in no_proxy.replace(" ", "").split(",") if host
+        )
 
         if is_ipv4_address(parsed.hostname):
             for proxy_ip in no_proxy:
@@ -798,7 +813,9 @@ def should_bypass_proxies(url, no_proxy):
                 host_with_port += ":{}".format(parsed.port)
 
             for host in no_proxy:
-                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
+                if parsed.hostname.endswith(host) or host_with_port.endswith(
+                    host
+                ):
                     # The URL does match something in no_proxy, so we don't want
                     # to apply the proxies on this URL.
                     return True
@@ -1006,7 +1023,8 @@ def check_header_validity(header):
     try:
         if not pat.match(value):
             raise InvalidHeader(
-                "Invalid return character or leading space in header: %s" % name
+                "Invalid return character or leading space in header: %s"
+                % name
             )
     except TypeError:
         raise InvalidHeader(
@@ -1044,7 +1062,10 @@ def rewind_body(prepared_request):
             body_seek(prepared_request._body_position)
         except (IOError, OSError):
             raise UnrewindableBodyError(
-                "An error occurred when rewinding request " "body for redirect."
+                "An error occurred when rewinding request "
+                "body for redirect."
             )
     else:
-        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
+        raise UnrewindableBodyError(
+            "Unable to rewind request body for redirect."
+        )