# urllib3 2.0.4 — package archive listing. Source files are reproduced as
# text; compiled __pycache__/*.pyc entries carry binary payloads and are
# summarised in place below.

# --- urllib3/__init__.py ---
"""
Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
"""

from __future__ import annotations

# Set default logging handler to avoid "No handler found" warnings.
import logging
import typing
import warnings
from logging import NullHandler

from . import exceptions
from ._base_connection import _TYPE_BODY
from ._collections import HTTPHeaderDict
from ._version import __version__
from .connectionpool import (
    HTTPConnectionPool,
    HTTPSConnectionPool,
    connection_from_url,
)
from .filepost import _TYPE_FIELDS, encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import BaseHTTPResponse, HTTPResponse
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout

# Ensure that Python is compiled with OpenSSL 1.1.1+
# If the 'ssl' module isn't available at all that's
# fine, we only care if the module is available.
try:
    import ssl
except ImportError:
    pass
else:
    if not ssl.OPENSSL_VERSION.startswith("OpenSSL "):  # Defensive:
        warnings.warn(
            "urllib3 v2.0 only supports OpenSSL 1.1.1+, currently "
            f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
            "See: https://github.com/urllib3/urllib3/issues/3020",
            exceptions.NotOpenSSLWarning,
        )
    elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1):  # Defensive:
        raise ImportError(
            "urllib3 v2.0 only supports OpenSSL 1.1.1+, currently "
            f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. "
            "See: https://github.com/urllib3/urllib3/issues/2168"
        )

# === NOTE TO REPACKAGERS AND VENDORS ===
# Please delete this block, this logic is only
# for urllib3 being distributed via PyPI.
# See: https://github.com/urllib3/urllib3/issues/2680
try:
    import urllib3_secure_extra  # type: ignore # noqa: F401
except ModuleNotFoundError:
    pass
else:
    warnings.warn(
        "'urllib3[secure]' extra is deprecated and will be removed "
        "in urllib3 v2.1.0. Read more in this issue: "
        "https://github.com/urllib3/urllib3/issues/2680",
        category=DeprecationWarning,
        stacklevel=2,
    )

__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = __version__

__all__ = (
    "HTTPConnectionPool",
    "HTTPHeaderDict",
    "HTTPSConnectionPool",
    "PoolManager",
    "ProxyManager",
    "HTTPResponse",
    "Retry",
    "Timeout",
    "add_stderr_logger",
    "connection_from_url",
    "disable_warnings",
    "encode_multipart_formdata",
    "make_headers",
    "proxy_from_url",
    "request",
    "BaseHTTPResponse",
)

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(
    level: int = logging.DEBUG,
) -> logging.StreamHandler[typing.TextIO]:
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug("Added a stderr logging handler to logger: %s", __name__)
    return handler


# ... Clean up.
del NullHandler


# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)


def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None:
    """
    Helper for quickly disabling all urllib3 warnings.
    """
    warnings.simplefilter("ignore", category)


_DEFAULT_POOL = PoolManager()


def request(
    method: str,
    url: str,
    *,
    body: _TYPE_BODY | None = None,
    fields: _TYPE_FIELDS | None = None,
    headers: typing.Mapping[str, str] | None = None,
    preload_content: bool | None = True,
    decode_content: bool | None = True,
    redirect: bool | None = True,
    retries: Retry | bool | int | None = None,
    timeout: Timeout | float | int | None = 3,
    json: typing.Any | None = None,
) -> BaseHTTPResponse:
    """
    A convenience, top-level request method.
    It uses a module-global ``PoolManager`` instance.
    Therefore, its side effects could be shared across dependencies relying on it.
    To avoid side effects create a new ``PoolManager`` instance and use it instead.
    The method does not accept low-level ``**urlopen_kw`` keyword arguments.
    """

    return _DEFAULT_POOL.request(
        method,
        url,
        body=body,
        fields=fields,
        headers=headers,
        preload_content=preload_content,
        decode_content=decode_content,
        redirect=redirect,
        retries=retries,
        timeout=timeout,
        json=json,
    )
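# Editor's example (not part of the packaged archive): a minimal sketch of the
# top-level request() helper defined above. It goes through the module-global
# PoolManager, so it suits one-off calls; the URL here is a placeholder
# assumption.
import urllib3

resp = urllib3.request("GET", "https://example.com/", timeout=3.0)
print(resp.status, resp.headers.get("Content-Type"))
print(len(resp.data))  # preload_content=True, so the body is already read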
# --- __pycache__/__init__.cpython-311.pyc ---
# Compiled bytecode for __init__.py above; binary payload omitted.

# --- __pycache__/_base_connection.cpython-311.pyc ---
# Compiled bytecode for _base_connection.py; binary payload omitted. Readable
# fragments cover the typing interfaces ProxyConfig, _ResponseOptions,
# BaseHTTPConnection and BaseHTTPSConnection, including the connection-state
# properties is_closed, is_connected and has_connected_to_proxy.
# --- __pycache__/_collections.cpython-311.pyc ---
# Compiled bytecode for _collections.py; binary payload omitted. Readable
# docstrings describe:
#   * RecentlyUsedContainer — a thread-safe dict-like container that keeps at
#     most ``maxsize`` keys, throwing away the least-recently-used entries and
#     calling ``dispose_func(value)`` on every evicted value.
#   * HTTPHeaderDict — a ``dict``-like container for HTTP headers that stores
#     and compares field names case-insensitively in compliance with RFC 7230,
#     can hold multiple values per field (e.g. repeated ``Set-Cookie``), and
#     iterates with the first case-sensitive key seen for each pair.
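# Editor's sketch (not part of the archive): the HTTPHeaderDict behaviour
# summarised above, reproduced from the doctest visible in the bytecode.
from urllib3 import HTTPHeaderDict

headers = HTTPHeaderDict()
headers.add("Set-Cookie", "foo=bar")
headers.add("set-cookie", "baz=quxx")
headers["content-length"] = "7"
assert headers["SET-cookie"] == "foo=bar, baz=quxx"  # values joined with ", "
assert headers["Content-Length"] == "7"  # lookup is case-insensitive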
# --- __pycache__/_request_methods.cpython-311.pyc ---
# Compiled bytecode for _request_methods.py; binary payload omitted. Readable
# docstrings describe RequestMethods, the convenience mixin used by
# HTTPConnectionPool and PoolManager: ``request()`` picks the field encoding
# from the HTTP method, ``request_encode_url()`` encodes ``fields`` into the
# URL (GET, HEAD, DELETE, OPTIONS), and ``request_encode_body()`` encodes
# them into the body, multipart/form-data by default, supporting
# (filename, data[, MIME type]) filetuples for uploads.
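# Editor's sketch (not part of the archive): posting multipart fields through
# the top-level request() helper, following the filetuple format documented in
# request_encode_body(); the URL and file contents are placeholder assumptions.
import urllib3

fields = {
    "foo": "bar",  # plain key/value field
    "fakefile": ("foofile.txt", "contents of foofile"),  # (filename, data)
    "typedfile": ("bazfile.bin", b"\x00\x01", "image/jpeg"),  # plus MIME type
}
resp = urllib3.request("POST", "https://example.com/upload", fields=fields)
print(resp.status)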
# --- __pycache__/_version.cpython-311.pyc ---
# Compiled bytecode for _version.py; binary payload omitted. The embedded
# constant is ``__version__ = "2.0.4"``.

# --- __pycache__/connection.cpython-311.pyc ---
# Compiled bytecode for connection.py; binary payload omitted. Readable
# docstrings describe:
#   * HTTPConnection — based on http.client.HTTPConnection with an extra
#     constructor backwards-compatibility layer; accepts ``source_address``
#     and ``socket_options`` keyword parameters, and the default socket
#     options disable Nagle's algorithm (TCP_NODELAY) unless the connection
#     is behind a proxy.
#   * HTTPSConnection — feeds key_file, cert_file, cert_reqs, ca_certs,
#     ca_cert_dir, ssl_version and key_password into
#     urllib3.util.ssl_wrap_socket, and verifies connections using
#     assert_fingerprint, assert_hostname and host, in that order.
#   * DummyConnection — used to detect a failed ConnectionCls import.

# --- __pycache__/connectionpool.cpython-311.pyc ---
# Compiled bytecode for connectionpool.py; binary payload omitted. Readable
# docstrings describe:
#   * ConnectionPool — base class for all connection pools; note that its
#     urlopen() does not normalize or percent-encode target URIs.
#   * HTTPConnectionPool — a thread-safe pool for one host, configured via
#     ``timeout``, ``maxsize``, ``block``, ``headers``, ``retries`` and proxy
#     parameters; additional ``**conn_kw`` arguments are used to create fresh
#     HTTPConnection instances. urlopen() is the lowest-level request call and
#     handles retries and redirects.
#   * HTTPSConnectionPool — the HTTPS variant, which emits
#     InsecureRequestWarning when certificate verification is disabled.
#   * connection_from_url(url, **kw) — shortcut that builds the appropriate
#     pool for a URL's scheme, host and port.
stacklevel)warningswarnDeprecationWarningrYr(s rrzNewConnectionError.pools0  5      yr)rYrrrrr)rr)rrrrr!propertyrr,r's@rrXrXscRR//////XrrXc$eZdZdZd fd ZxZS) NameResolutionErrorz'Raised when host name resolution fails.hostrrYrrDsocket.gaierrorc^d|d|d}t||dS)NzFailed to resolve 'z' (rGr r!)r"rdrYrDrr#s rr!zNameResolutionError.__init__s;:::::: w'''''r)rdrrYrrDrerHr's@rrcrcsC11((((((((((rrcceZdZdZdS)EmptyPoolErrorzCRaised when a pool runs out of connections and no more are allowed.Nr rrrririMMMMrriceZdZdZdS) FullPoolErrorzGRaised when we try to add a connection to a full pool in blocking mode.Nr rrrrlrlsQQQQrrlceZdZdZdS)ClosedPoolErrorzCRaised when a request enters a pool after the pool has been closed.Nr rrrrnrnrjrrnceZdZdZdS)LocationValueErrorzProxy URL had no scheme, should start with http:// or https://z!Proxy URL had unsupported scheme z , should use http:// or https://rgrys rr!zProxySchemeUnknown.__init__#sO [ F >VGGb&bbbG !!!!!r)rwrrrrHr's@rrrsC;; " " " " " " " " " "rrceZdZdZdS)ProxySchemeUnsupportedz=Fetching HTTPS resources through HTTPS proxies is unsupportedNr rrrrr/sGGGGrrc$eZdZdZd fd ZxZS) HeaderParsingErrorzNRaised by assert_header_parsing, but we convert it to a log.warning statement.defectslist[MessageDefect] unparsed_databytes | str | Nonerrc\|pdd|}t|dS)NUnknownz, unparsed data: rg)r"rrrr#s rr!zHeaderParsingError.__init__6s;) MMMMM !!!!!r)rrrrrrrHr's@rrr3sCXX""""""""""rrceZdZdZdS)UnrewindableBodyErrorz9urllib3 encountered an error when trying to rewind a bodyNr rrrrr=r5rr)A __future__rsockettypingr^ email.errorsr http.clientrhttplib_IncompleteRead TYPE_CHECKING connectionrconnectionpoolrrr util.retryr r8r WarningrTupleCallableobjectr%rr.r4r7r>rAConnectionErrorrCrJrPrRrTrVrXrcrirlrn ValueErrorrprrrvr{rrrrrrrrrrAssertionErrorrrrrrrrrs"""""" &&&&&&@@@@@@ "******......&&&&&&!!!!!! ..... ...,,,,,',,,l OCK &,vs{";; , , , , , , , , 6 6 6 6 69 6 6 6DDDDDyDDD$$$$$$$$FFFFF)FFFIIIIIIIII   -----L---*|<<<<< <<<9QQQQQ|\QQQ KKKKK,KKK,i&(((((,(((NNNNNYNNNRRRRRIRRRNNNNNiNNNGGGGGYGGG!!!!!+!!!)>>>>>I>>>;;;;;k;;;:::::_:::44444444;;;;;;;;TTTTToTTT FFFFF FFFy     Y 6   &      $:    33333I333""""")9""""HHHHHZHHH""""""""DDDDDIDDDDDrPKQZJ22"__pycache__/fields.cpython-311.pycnu[ bg+ddlmZddlZddlZddlZejeefZ eje ej ee fej ee effZ ddd Z dd Z ddZddZddZGddZdS)) annotationsNapplication/octet-streamfilename str | NonedefaultstrreturncB|rtj|dp|S|S)z Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. r) mimetypes guess_type)rrs E/opt/cloudlinux/venv/lib64/python3.11/site-packages/urllib3/fields.pyguess_content_typers,<#H--a0;G; Nnamevalue_TYPE_FIELD_VALUEcddl}|dtdttrdt fddDs7|d d } |d |S#ttf$rYnwxYwtj d|d S) a Helper function to format and quote a single header parameter using the strategy defined in RFC 2231. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows `RFC 2388 Section 4.4 `_. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as ``bytes`` or `str``. :returns: An RFC-2231-formatted unicode string. .. deprecated:: 2.0.0 Will be removed in urllib3 v2.1.0. This is not valid for ``multipart/form-data`` header parameters. rNz'format_header_param_rfc2231' is deprecated and will be removed in urllib3 v2.1.0. This is not valid for multipart/form-data header parameters. 
stacklevelutf-8c3 K|]}|vV dSN).0chrs r z.format_header_param_rfc2231..@s'//rrU{//////rz"\ =""asciiz*=) warningswarnDeprecationWarning isinstancebytesdecodeanyencodeUnicodeEncodeErrorUnicodeDecodeErroremailutilsencode_rfc2231)rrr!results ` r format_header_param_rfc2231r/s (OOO MM 1  %& W%% ////Y/// / /$$E$$$  MM' " " "M#$67    D  K & &ug 6 6E  u  E Ls1BBBct|tr|d}|dddd}|d|dS)a Format and quote a single multipart header parameter. This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching the behavior of current browser and curl versions. Values are assumed to be UTF-8. The ``\n``, ``\r``, and ``"`` characters are percent encoded. .. _WHATWG HTML Standard: https://html.spec.whatwg.org/multipage/ form-control-infrastructure.html#multipart-form-data :param name: The name of the parameter, an ASCII-only ``str``. :param value: The value of the parameter, a ``str`` or UTF-8 encoded ``bytes``. :returns: A string ``name="value"`` with the escaped value. .. versionchanged:: 2.0.0 Matches the WHATWG HTML Standard as of 2021/06/10. Control characters are no longer percent encoded. .. versionchanged:: 2.0.0 Renamed from ``format_header_param_html5`` and ``format_header_param``. The old names will be removed in urllib3 v2.1.0. rz%0Az%0Dz%22) "rr)r$r%r& translate)rrs r format_multipart_header_paramr5Os[<%& W%% OOEu== > >E  e   rcdddl}|dtdt||S) .. deprecated:: 2.0.0 Renamed to :func:`format_multipart_header_param`. Will be removed in urllib3 v2.1.0. rNz'format_header_param_html5' has been renamed to 'format_multipart_header_param'. The old name will be removed in urllib3 v2.1.0.rrr!r"r#r5rrr!s r format_header_param_html5r:uG OOO MM %   )u 5 55rcdddl}|dtdt||S)r7rNzz'format_header_param' has been renamed to 'format_multipart_header_param'. The old name will be removed in urllib3 v2.1.0.rrr8r9s r format_header_paramr=r;rcfeZdZdZ ddd Ze dd dZd!dZd"dZd#dZ dd$dZ dS)% RequestFielda A data container for request body parameters. :param name: The name of this request field. Must be unicode. :param data: The data/value body. :param filename: An optional filename of the request field. Must be unicode. :param headers: An optional dict-like object of headers to initially use for the field. .. versionchanged:: 2.0.0 The ``header_formatter`` parameter is deprecated and will be removed in urllib3 v2.1.0. Nrrdatarrrheaderstyping.Mapping[str, str] | Noneheader_formatter5typing.Callable[[str, _TYPE_FIELD_VALUE], str] | Nonec||_||_||_i|_|rt ||_|*ddl}|dtd||_dSt|_dS)NrzUThe 'header_formatter' parameter is deprecated and will be removed in urllib3 v2.1.0.rr) _name _filenamer@rAdictr!r"r#rCr5)selfrr@rrArCr!s r __init__zRequestField.__init__s ! .0  )==DL  ' OOO MM5"     %5D ! ! !$AD ! ! !r fieldnamer_TYPE_FIELD_VALUE_TUPLEr ct|trt|dkr""""*+/#''+ <<<<<<r~s-"""""" Le, , L''( L',-/*D      ----`####L6666$6666$@<@<@<@<@<@<@<@<@<@>33 e\ * * 3KKKK*E2 2 2 2 2 33rboundary str | Nonetuple[bytes, str]ct}|t}t|D]}|d|ddt |||j}t|trt|}t|tr#t ||n|||d|d|ddd|}| |fS)a Encode a dictionary of ``fields`` using the multipart/form-data MIME format. :param fields: Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). Values are processed by :func:`urllib3.fields.RequestField.from_tuples`. :param boundary: If not specified, then a random boundary will be generated using :func:`urllib3.filepost.choose_boundary`. 
Nz--z zlatin-1s z-- zmultipart/form-data; boundary=) rrr!writeencodewriterrender_headersdatarintr getvalue)rr"bodyr r* content_types rencode_multipart_formdatar/3sM 99D"$$#F++ &&&&--i88999t 5//11222z dC  t99D dC   4LL  t $ $ $ $ JJt    7JJ$H$$$++I66777>H>>L ==??L ((r)r r )rrr r)N)rrr"r#r r$) __future__rr codecsrriorrrrlookupr(SequenceUnionTupler _TYPE_FIELDS_SEQUENCErrrr!r/rrrr8s"""""" 99999999 w   " Lc#::;\IJ| N3//02 55553333.26&)&)&)&)&)&)&)rPKQZ֕bb'__pycache__/poolmanager.cpython-311.pycnu[ bgxXddlmZddlZddlZddlZddlZddlmZddlm Z ddl m Z ddl m Z ddlmZdd lmZmZmZdd lmZmZmZmZdd lmZdd lmZdd lmZddlm Z ddl!m"Z"ddl#m$Z$m%Z%ej&r ddl'Z'ddl(m)Z)gdZ*ej+e,Z-dZ.dZ/ej0dZ1Gddej2Z3d(dZ4ej5e4e3ej5e4e3dZ6eedZ7Gdd e Z8Gd!d"e8Z9d)d'Z:dS)*) annotationsN) TracebackType)urljoin)RecentlyUsedContainer)RequestMethods) ProxyConfig)HTTPConnectionPoolHTTPSConnectionPoolport_by_scheme)LocationValueError MaxRetryErrorProxySchemeUnknownURLSchemeUnknown)BaseHTTPResponse)_TYPE_SOCKET_OPTIONS)connection_requires_http_tunnel)Retry)Timeout)Url parse_url)Literal) PoolManager ProxyManagerproxy_from_url) key_file cert_file cert_reqsca_certs ssl_versionssl_minimum_versionssl_maximum_version ca_cert_dir ssl_context key_passwordserver_hostnamei@_SelfTc,eZdZUdZded<ded<ded<ded<d ed <d ed <d ed<ded<ded<ded<ded<ded<ded<ded<ded<ded<ded<ded<ded<d ed!<ded"<d#ed$<d%ed&<ded'<d(ed)<ded*<ded+<ded,<d-S).PoolKeyz All known keyword arguments that could be provided to the pool manager, its pools, or the underlying connections. All custom key schemes should include the fields in this key at a minimum. str key_schemekey_host int | Nonekey_portzTimeout | float | int | None key_timeoutzRetry | bool | int | None key_retriesz bool | None key_blockztuple[str, int] | Nonekey_source_address str | None key_key_filekey_key_password key_cert_file key_cert_reqs key_ca_certszint | str | Nonekey_ssl_versionzssl.TLSVersion | Nonekey_ssl_minimum_versionkey_ssl_maximum_versionkey_ca_cert_dirssl.SSLContext | Nonekey_ssl_context key_maxsizez!frozenset[tuple[str, str]] | None key_headers Url | None key__proxykey__proxy_headersProxyConfig | Nonekey__proxy_configz_TYPE_SOCKET_OPTIONS | Nonekey_socket_optionskey__socks_optionszbool | str | Nonekey_assert_hostnamekey_assert_fingerprintkey_server_hostname key_blocksizeN)__name__ __module__ __qualname____doc____annotations__J/opt/cloudlinux/venv/lib64/python3.11/site-packages/urllib3/poolmanager.pyr)r)9shOOOMMM----****....    %%%%22222222****22229999))))33339999****&&&&####rRr) key_class type[PoolKey]request_contextdict[str, typing.Any]returncX|}|d|d<|d|d<dD]8}||vr2||*t||||<9|d}|t ||d<t |D]}|||d|z<|j D] }||vrd||< |d t|d<|di|S) a Create a pool key out of a request context dictionary. According to RFC 3986, both the scheme and host are case-insensitive. Therefore, this function normalizes both before constructing the pool key for an HTTPS request. If you wish to change this behaviour, provide alternate callables to ``key_fn_by_scheme``. :param key_class: The class to use when constructing the key. This should be a namedtuple with the ``scheme`` and ``host`` keys at a minimum. :type key_class: namedtuple :param request_context: A dictionary-like object that contain the context for a request. :type request_context: dict :return: A namedtuple that can be used as a connection pool key. 
:rtype: PoolKey schemehost)headers_proxy_headers_socks_optionsNsocket_optionskey_rKrQ) copylower frozensetitemsgettuplelistkeyspop_fields_DEFAULT_BLOCKSIZE)rTrVcontextkey socket_optsfields rS_default_key_normalizerrp_sV.""$$G)//11GHfo++--GFO?;; '>>gcl6$WS\%7%7%9%9::GCL++.//K$)+$6$6 !GLLNN##11 ' C 0 0 """   !GEN{{?##+#5 9  w  rRhttphttpsceZdZUdZdZded<dZded< d=d>fd Zd?dZd@dZ dAdBd#Z dCd$Z dDdEd)Z dFd+Z dGd.Z dAdHd0ZdId2ZdJd6Z dKdLd<ZxZS)MraV Allows for arbitrary requests while transparently keeping track of necessary connection pools for you. :param num_pools: Number of connection pools to cache before discarding the least recently used pool. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param \**connection_pool_kw: Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. Example: .. code-block:: python import urllib3 http = urllib3.PoolManager(num_pools=2) resp1 = http.request("GET", "https://google.com/") resp2 = http.request("GET", "https://google.com/mail") resp3 = http.request("GET", "https://yahoo.com/") print(len(http.pools)) # 2 NrAproxyrD proxy_config num_poolsintr\typing.Mapping[str, str] | Noneconnection_pool_kw typing.AnyrXNonec t|||_|t||_t |_t |_dSN)super__init__r{rpoolspool_classes_by_schemekey_fn_by_schemera)selfrxr\r{ __class__s rSrzPoolManager.__init__s^ !!!"4 F*955 '=# 0 5 5 7 7rRrr'c|SrrQrs rS __enter__zPoolManager.__enter__s rRexc_typetype[BaseException] | Noneexc_valBaseException | Noneexc_tbTracebackType | NoneLiteral[False]c.|dS)NF)clear)rrrrs rS__exit__zPoolManager.__exit__s urRrZr*r[portrVdict[str, typing.Any] | Noner c&|j|}||j}|d t|d<dD]}||d|dkr t D]}||d|||fi|S)a Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and any additional pool keyword arguments. If ``request_context`` is provided, it is provided as keyword arguments to the pool class used. This method is used to actually create the connection pools handed out by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. N blocksize)rZr[rrr)rr{rarerkri SSL_KEYWORDS)rrZr[rrVpool_clsrmkws rS _new_poolzPoolManager._new_pools .2-H-P  ""5::<>$*$4f! M!%oh&?&E&E&G&GLLD"&"&++O<<"       ) ) ) *0022#488@@# +"6** *''88,,X,WWWrRrr)c|jj5|j|}|r|cdddS|d}|d}|d}|||||}||j|<dddn #1swxYwY|S)a Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and ``port`` fields. NrZr[rr)rlockrer)rrrVpoolrZr[rs rSrz$PoolManager.connection_from_pool_keyJsZ_ ( (:>>(++D  ( ( ( ( ( ( ( (%X.F"6*D"6*D>>&$o>VVD#'DJx  ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( sA?;A??BBurlcpt|}||j|j|j|S)a Similar to :func:`urllib3.connectionpool.connection_from_url`. If ``pool_kwargs`` is not provided and a new pool needs to be constructed, ``self.connection_pool_kw`` is used to initialize the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` is provided, it is used instead. Note that if a new pool does not need to be created for the request, the provided ``pool_kwargs`` are not used. )rrZr)rrr[rrZ)rrrus rSconnection_from_urlzPoolManager.connection_from_urlds; cNN(( Fk)   rRoverridec|j}|r6|D]!\}}| ||= #t$rYwxYw|||<"|S)a Merge a dictionary of override values for self.connection_pool_kw. This does not modify self.connection_pool_kw and returns a new dict. 
Any keys in the override dictionary with a value of ``None`` are removed from the merged dictionary. )r{rardKeyError)rrbase_pool_kwargsrmvalues rSrzPoolManager._merge_pool_kwargsvs 27799  2&nn.. 2 2 U=,S11#-2$S))s< A A  parsed_urlrboolcV|jdSt|j|j|j S)z Indicates if the proxy requires the complete destination URL in the request. Normally this is only needed when not using an HTTP CONNECT tunnel. NF)rurrvrZ)rrs rS!_proxy_requires_url_absolute_formz-PoolManager._proxy_requires_url_absolute_forms7 : 52 J):+<    rRTmethodredirectrrc Vt|}|jtjdtd||j|j|j}d|d<d|d<d |vr |j|d <| |r|j ||fi|}n|j ||j fi|}|o| }|s|St||}|jd krd }|d } t!| t"st#j| | } | jrp||s[|d } |d D]3} | | jvr| | d4| |d < | ||||} n.#t2$r!| jr||cYSwxYw| |d <||d<t8d||||j ||fi|S)aN Same as :meth:`urllib3.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. The given ``url`` parameter must be absolute, such that an appropriate :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. Na URLs without a scheme (ie 'https://') are deprecated and will raise an error in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs start with 'https://' or 'http://'. Read more in this issue: https://github.com/urllib3/urllib3/issues/2920)category stacklevel)rrZFassert_same_hostrr\i/GETretries)r)response_poolzRedirecting %s -> %s)rrZrrrrr[rr\rurlopen request_uriget_redirect_locationrstatusre isinstancerfrom_intremove_headers_on_redirect is_same_hostrarbri incrementrraise_on_redirect drain_connloginfo) rrrrrrconnrredirect_locationr new_headersheaders rSrzPoolManager.urlopens cNN 8  MA,     ((afQX(NN!& : B   LByM  1 1! 4 4 A#t|FC66266HH#t|FAM@@R@@H$I)G)G)I)I  O$C):;; ?c ! !F&&##'5)) AnWx@@@G  - (d6G6G 7 7  (Y-,,..KY- 2 2<<>>W%GGGOOFD111'ByM ''hd'SSGG   ( ##%%%OOO    9 !: '.?@@@t|F$5<<<< Behaves just like :class:`PoolManager`, but sends all requests through the defined proxy, using the CONNECT method for HTTPS URLs. :param proxy_url: The URL of the proxy to be used. :param proxy_headers: A dictionary containing headers that will be sent to the proxy. In case of HTTP they are being sent with each request, while in the HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. :param proxy_ssl_context: The proxy SSL context is used to establish the TLS connection to the proxy when using HTTPS proxies. :param use_forwarding_for_https: (Defaults to False) If set to True will forward requests to the HTTPS proxy to be made on behalf of the client instead of creating a TLS tunnel via the CONNECT method. **Enabling this flag means that request and response headers and content will be visible from the HTTPS proxy** whereas tunneling keeps request and response headers and content private. IP address, target hostname, SNI, and port are always visible to an HTTPS proxy even when this flag is disabled. :param proxy_assert_hostname: The hostname of the certificate to verify against. :param proxy_assert_fingerprint: The fingerprint of the certificate to verify against. Example: .. 
code-block:: python import urllib3 proxy = urllib3.ProxyManager("https://localhost:3128/") resp1 = proxy.request("GET", "https://google.com/") resp2 = proxy.request("GET", "https://httpbin.org/") print(len(proxy.pools)) # 1 resp3 = proxy.request("GET", "https://httpbin.org/") resp4 = proxy.request("GET", "https://twitter.com/") print(len(proxy.pools)) # 3 rwNF proxy_urlr*rxryr\rz proxy_headersproxy_ssl_contextr=use_forwarding_for_httpsrproxy_assert_hostnameNone | str | Literal[False]proxy_assert_fingerprintr3r{r|rXr}c  t|tr|jd|jd|j} n|} t | } | jdvrt | j| js0tj| jd} | | } | |_ |pi|_ ||_ t|||||_|j | d<|j | d<|j| d<tj||fi| dS) Nz://:rqr)r_proxyr] _proxy_config)rr rZr[rrrr re_replacerurrr rvrr)rrrxr\rrrrrr{ str_proxy_urlrurrs rSrzProxyManager.__init__ s+ i!3 4 4 &(/UUINUUY^UUMM%M-(( <0 0 0$U\22 2z .!%elB77DNNN--E *0b!2'  $ ! $   (,z8$/3/A+,.2.??+GBB/ABBBBBrRrrr[rr-rZrrr c|dkr%t||||St|jj|jj|jj|S)Nrs)r)rrrur[rrZ)rr[rrZrrs rSrz!ProxyManager.connection_from_hostIss W  77//dF 0 ww++ JOTZ_dj.?[,   rRrtyping.Mapping[str, str]crddi}t|j}|r||d<|r|||S)z Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. Acceptz*/*Host)rnetlocupdate)rrr\headers_rs rS_set_proxy_headerszProxyManager._set_proxy_headersYsMe$3&  &%HV   % OOG $ $ $rRTrrrrc t|}t|j|j|js4|d|j}||||d<tj ||fd|i|S)z@Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute.r\r) rrrurvrZrer\rrr)rrrrrrr\rs rSrzProxyManager.urlopenjs cNN.tz4;LahWW BffY 55G 33CAAByMuwwvsDDXDDDDrR)rwNNNFNN)rr*rxryr\rzrrzrr=rrrrrr3r{r|rXr}rrr)rr*r\rzrXrrr) rLrMrNrOrrrrrrs@rSrrs44r379=37).=A/3'C'C'C'C'C'C'CX #48        "DH$7; E E E E E E E E E E ErRrrr*rr|c tdd|i|S)NrrQ)r)rrs rSrrys  , ,# , , ,,rR)rTrUrVrWrXr))rr*rr|rXr); __future__r functoolsloggingtypingrtypesr urllib.parser _collectionsr_request_methodsr connectionr connectionpoolr r r exceptionsr rrrrrutil.connectionr util.proxyr util.retryr util.timeoutrutil.urlrr TYPE_CHECKINGssltyping_extensionsr__all__ getLoggerrLrrrkTypeVarr' NamedTupler)rppartialrrrrrrQrRrSrs""""""  //////,,,,,,######SSSSSSSSSS '&&&&&111111777777!!!!!!$$$$$$$$ *JJJ)))))) ; ; ;g!!    ! !#####f###L4 4 4 4 x I 5w ? ? 
Y 6 @ @ #5?RSSD=D=D=D=D=.D=D=D=N MEMEMEMEME;MEMEME`------rRPKQZ:>$__pycache__/response.cpython-311.pycnu[ bg ~ddlmZddlZddlZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl m Z ddlmZddlmZddlmZ ddlZn#e$rddlZYnwxYwn #e$rdZYnwxYw ddlZeeeejdej xZ!Z!e!dkrdZn#e"ee#f$rdZYnwxYwd d l$m%Z%d d l&m'Z'd d l(m)Z)d d l*m+Z+m,Z,m-Z-d dl.m/Z/m0Z0m1Z1m2Z2m3Z3m4Z4m5Z5m6Z6m7Z7m8Z8d dl9m:Z:m;Z;d dlr ddl?m@Z@d dlAmBZBejCeDZEGddZFGddeFZGGddZHGddeFZIeGddeFZJeGddeFZKGdd eFZLd+d$ZMGd%d&ZNGd'd(ejOZPGd)d*ePZdS),) annotationsN)contextmanager) HTTPMessage) HTTPResponse)timeoutz^([0-9]+)\.([0-9]+))r)util) _TYPE_BODY)HTTPHeaderDict) BaseSSLErrorHTTPConnection HTTPException) BodyNotHttplibCompatible DecodeError HTTPErrorIncompleteReadInvalidChunkLength InvalidHeader ProtocolErrorReadTimeoutErrorResponseNotChunkedSSLError) is_fp_closedis_response_to_head)Retry)Literal)HTTPConnectionPoolceZdZddZddZdS) ContentDecoderdatabytesreturnctNNotImplementedErrorselfr!s G/opt/cloudlinux/venv/lib64/python3.11/site-packages/urllib3/response.py decompresszContentDecoder.decompressE!###ctr%r&r)s r*flushzContentDecoder.flushHr,r-Nr!r"r#r"r#r")__name__ __module__ __qualname__r+r0r-r*r r Ds<$$$$$$$$$$r-r c&eZdZd dZd dZd dZdS) DeflateDecoderr#NonecRd|_d|_tj|_dS)NTr-) _first_try_datazlib decompressobj_objr/s r*__init__zDeflateDecoder.__init__Ms$ &(( r-r!r"c|s|S|js|j|S|xj|z c_ |j|}|rd|_d|_|S#tj$r\d|_t jtj |_ ||jd|_cYS#d|_wxYwwxYw)NF)r;r?r+r<r=errorr> MAX_WBITS)r)r! decompresseds r*r+zDeflateDecoder.decompressRs K .9''-- - d  "9//55L ""'!  z " " "#DO*DN?;;DI "tz22! T !!!!  "s#+A##:CC7C C  Cc4|jSr%r?r0r/s r*r0zDeflateDecoder.flushhy   r-Nr#r9r1r2r3r4r5r@r+r0r6r-r*r8r8LsP)))) """",!!!!!!r-r8ceZdZdZdZdZdS)GzipDecoderStaterr N)r3r4r5 FIRST_MEMBER OTHER_MEMBERS SWALLOW_DATAr6r-r*rKrKlsLMLLLr-rKc&eZdZd dZd dZd dZdS) GzipDecoderr#r9cttjdtjz|_tj|_dS)N)r=r>rCr?rKrM_stater/s r*r@zGzipDecoder.__init__ss)&rDN':;; &3 r-r!r"ct}|jtjks|st |S ||j|z }nM#tj$r;|j}tj|_|tj krt |cYSwxYw|jj }|st |Stj |_tj dtj z|_)NTrS) bytearrayrTrKrOr"r?r+r=rBrN unused_datar>rC)r)r!retprevious_states r*r+zGzipDecoder.decompresswskk ;*7 7 7t 7::  @ ty++D111:   !%.; !%5%CCC ::%%% 9(D "Szz!*8DK*2+>??DI @sAABBc4|jSr%rFr/s r*r0zGzipDecoder.flushrGr-NrHr1r2rIr6r-r*rQrQrsT4444@@@@*!!!!!!r-rQceZdZddZddZdS) BrotliDecoderr#r9ctj|_t|jdrt |d|jjdSt |d|jjdS)Nr+)brotli Decompressorr?hasattrsetattrr+processr/s r*r@zBrotliDecoder.__init__s`+--DIty,// ?lDI,@AAAAAlDI,=>>>>>r-r"cbt|jdr|jSdS)Nr0r-)r`r?r0r/s r*r0zBrotliDecoder.flushs-ty'** )y(((3r-NrHr2)r3r4r5r@r0r6r-r*r\r\s< ? ? ? ?      r-r\c&eZdZd dZd dZd dZdS) ZstdDecoderr#r9cZtj|_dSr%)zstdZstdDecompressorr>r?r/s r*r@zZstdDecoder.__init__s"-//==??DIIIr-r!r"c|sdS|j|g}|jjr|jjr{|jj}t j|_||j||jjr |jj{d|S)Nr-) r?r+eofrWrgrhr>appendjoin)r)r! data_partsrWs r*r+zZstdDecoder.decompresss s)..t445J)- EDI$9 E"i3  133AACC !!$)"6"6{"C"CDDD)- EDI$9 E88J'' 'r-cn|j}|jjstd|S)NzZstandard data is incomplete)r?r0rjr)r)rXs r*r0zZstdDecoder.flushs4)//##C9= B!"@AAAJr-NrHr1r2rIr6r-r*reresT @ @ @ @ ( ( ( (      r-rec*eZdZdZd dZd dZdd Zd S) MultiDecodera From RFC7231: If one or more encodings have been applied to a representation, the sender that applied the encodings MUST generate a Content-Encoding header field that lists the content codings in the order in which they were applied. 
modesstrr#r9cNd|dD|_dS)NcPg|]#}t|$Sr6) _get_decoderstrip).0ms r* z)MultiDecoder.__init__..s(LLLa,qwwyy11LLLr-,)split _decoders)r)rqs r*r@zMultiDecoder.__init__s'LL5;;s;K;KLLLr-r"c@|jdSNr)r|r0r/s r*r0zMultiDecoder.flushs~a &&(((r-r!c^t|jD]}||}|Sr%)reversedr|r+)r)r!ds r*r+zMultiDecoder.decompresss3$.)) & &A<<%%DD r-N)rqrrr#r9r2r1)r3r4r5__doc__r@r0r+r6r-r*rprps^MMMM))))r-rpmoderrr#cd|vrt|S|dkrtSt|dkrtSt|dkrt St S)Nrzgzipbrrg)rprQr^r\rgrer8)rs r*rurush d{{D!!! v~~}} ddll DFNN}}   r-c2eZdZdZd dZddZdd Zdd Zd S)BytesQueueBuffera Memory-efficient bytes buffer To return decoded data in read() and still follow the BufferedIOBase API, we need a buffer to always return the correct amount of bytes. This buffer should be filled using calls to put() Our maximum memory usage is determined by the sum of the size of: * self.buffer, which contains the full data * the largest chunk that we will copy in get() The worst case scenario is a single chunk, in which case we'll make a full copy of the data inside get(). r#r9cDtj|_d|_dSr~) collectionsdequebuffer_sizer/s r*r@zBytesQueueBuffer.__init__s+6+<+>+>  r-intc|jSr%)rr/s r*__len__zBytesQueueBuffer.__len__s zr-r!r"ct|j||xjt|z c_dSr%)rrkrlenr(s r*putzBytesQueueBuffer.puts2 4    c$ii r-ncD|dkrdS|jstd|dkrtdd}tj}||kr||z }|j}t |}||krT|d|||d}}|||j||xj |zc_ n8|||xj |zc_ ||z }|jsn||k| S)Nrr-zbuffer is emptyzn should be > 0) r RuntimeError ValueErrorioBytesIOpopleftrwrite appendleftrgetvalue) r)rfetchedrX remainingchunk chunk_length left_chunk right_chunks r*getzBytesQueueBuffer.gets6 663 0011 1 UU.// /jllkkG IK''))Eu::L<''*/  *;U9::=NK  *%%% &&{333 i'  %    l* | #G; !kk$||~~r-NrHr#r)r!r"r#r9)rrr#r")r3r4r5rr@rrrr6r-r*rrsn     r-rceZdZUddgZeedgz Zeedgz ZgdZeej fZ de d<e e ej fz Z e e ej fz Z ddd dCdZ dDdZedEdZdFdZedGdZejdHd!ZedId#ZedJd$ZejdKd%Z dLdMd+Z dNdOd.Z dPdMd/ZdQd0ZdQd1ZdQd2ZdQd3ZdRd6ZdEd7ZdSd:ZdTd<Z dUdVd@Z!dTdAZ"dGdBZ#dS)WBaseHTTPResponserdeflateNrrg)i-i.i/i3i4ztuple[type[Exception], ...]DECODER_ERROR_CLASSES)headersretriesr>typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | Nonestatusrversionreason str | Nonedecode_contentbool request_urlr Retry | Noner#r9ct|tr||_nt||_||_||_||_||_d|_||_||_ d|_ |j dd }d| dD} d| vrd|_ d|_dS)NFztransfer-encodingc3>K|]}|VdSr%)rv)rwencs r* z,BaseHTTPResponse.__init__..Cs*>>SSYY[[>>>>>>r-rzchunkedT) isinstancer rrrrr_has_decoded_content _request_urlrrrlowerr{_decoder) r)rrrrrrrtr_enc encodingss r*r@zBaseHTTPResponse.__init__)s g~ . . 3"DLL)'22DL   ,$)!(3  !!"5r::@@BB>>FLL,=,=>>>  ! !DL/3 r-str | None | Literal[False]cV|j|jvr|jdSdS)a Should we redirect and where to? :returns: Truthy redirect location string if we got a redirect status code and valid location. ``None`` if redirect status and no location. ``False`` if not a redirect status code. locationF)rREDIRECT_STATUSESrrr/s r*get_redirect_locationz&BaseHTTPResponse.get_redirect_locationIs/ ;$0 0 0<##J// /ur-r"ctr%r&r/s r*r!zBaseHTTPResponse.dataU!###r- typing.Anyc^|jd}tj|S)a Parses the body of the HTTP response as JSON. To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. Read more :ref:`here `. zutf-8)r!decode_jsonloadsr(s r*jsonzBaseHTTPResponse.jsonYs)y(({4   r-ctr%r&r/s r*urlzBaseHTTPResponse.urlfrr-rctr%r&r)rs r*rzBaseHTTPResponse.urljrr-HTTPConnection | Nonectr%r&r/s r* connectionzBaseHTTPResponse.connectionnrr-c|jSr%)_retriesr/s r*rzBaseHTTPResponse.retriesrs }r-cT||jr|jdj|_||_dS)N)historyredirect_locationrr)r)rs r*rzBaseHTTPResponse.retriesvs-  7? 
r*.sAwwyyD$999GGII999r-)rrrrrrur{)r)content_encodingrs` r* _init_decoderzBaseHTTPResponse._init_decoders  <++,>CCIIKK = 4#888 ,-= > > (((-33C88 C$01A$B$BDMMM ! )( CCr-r! flush_decodercf|s|jrtd|S |jr!|j|}d|_nS#|j$rF}|jdd}td|z||d}~wwxYw|r|| z }|S)zN Decode the data passed in and potentially flush the decoder. _Calling read(decode_content=False) is not supported after read(decode_content=True) was called.TrrzEReceived response with content-encoding: %s, but failed to decode it.N) rrrr+rrrrr_flush_decoder)r)r!rrrrs r*_decodezBaseHTTPResponse._decodes  ( "<K } 1}//55,0))   #|//0BBGGMMOO ')9:     * D'')) )D s(A BABBc||jr4|jd|jzSdS)zk Flushes the decoder. Should only be called if the decoder is actually being used. r-)rr+r0r/s r*rzBaseHTTPResponse._flush_decoders= = I=++C004=3F3F3H3HH Hsr-brVc|t|}t|dkrdS||dt|<t|Sr~)rr)r)rtemps r*readintozBaseHTTPResponse.readintosLyyQ   t99>>1!AkD kNt99 r-r cHtjdtd|jS)Nz|HTTPResponse.getheaders() is deprecated and will be removed in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.rLcategory stacklevel)warningswarnDeprecationWarningrr/s r* getheaderszBaseHTTPResponse.getheaderss1  O'     |r-namerrdefaultcptjdtd|j||S)NzHTTPResponse.getheader() is deprecated and will be removed in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).rLr)rrrrr)r)rr s r* getheaderzBaseHTTPResponse.getheadersA  V'     |g...r-c|jSr%)rr/s r*infozBaseHTTPResponse.infos |r-c|jSr%)rr/s r*geturlzBaseHTTPResponse.geturls xr-)rrrrrrrrrrrrrrr#r9)r#rr2)r#rr#r)rrr#r9r#r)r#r)rrr#r9rN)rrrrr#rNNFrrrrrrr#r"NNrH)r!r"rrrrr#r")rrVr#r)r#r r%)rrrr rr#r)$r3r4r5rr^rgrIOErrorr=rBr__annotations__ ZstdErrorr@rpropertyr!rrsetterrrrrrrrrrrrrrr r rr6r-r*rrs * TF" VH$111:A4:9NNNNN &,0 $.!22 SW !%444444@    $$$X$ ! ! ! !$$$X$ Z$$$Z$$$$X$X ^   ^ FJ$$$$$&*# $$$$$&*$$$$$$$$$$$$$$$$$CCCC&</////r-rceZdZdZ dGdHfd" ZdId#ZdId$ZedJd&ZedKd'Z dLd(Z dMd)Z dNd+Z e dOd-ZdPdQd/Z dPdQd0Z dRdSd4Z dTdUd7ZdLd8ZdId9ZedLd:ZdMd;ZdId<ZdLd=ZdId>ZdQd?Z dVdUd@ZedWdAZejdXdDZdYdFZxZS)Zra HTTP Response container. Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. This class is also compatible with the Python standard library's :mod:`io` module, and can hence be treated as a readable object in the context of that framework. Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param original_response: When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` object, it's convenient to include the original for debug purposes. It's otherwise unused. :param retries: The retries contains the last :class:`~urllib3.util.retry.Retry` that was used during the request. :param enforce_content_length: Enforce content length checking. Body returned by server must match value of Content-Length header, if present. Otherwise, raise error. 
rNrTbodyr rrrrrrrpreload_contentrroriginal_response_HttplibHTTPResponse | NonepoolHTTPConnectionPool | Nonerrmsg_HttplibHTTPMessage | Nonerrenforce_content_lengthrequest_methodr auto_closer#r9c t||||||| | |_||_d|_d|_||_d|_| |_|r#t|ttfr||_| |_ | |_ t|dr||_d|_|||_t%|_|r$|js|||_dSdSdS)N)rrrrrrrrrr)superr@r$r&_body_fp_original_response_fp_bytes_readr"rrrr"_pool _connectionr` chunk_left _init_lengthlength_remainingr_decoded_bufferr)r)rrrrrrrrr rr"rr$r%rr& __class__s r*r@zHTTPResponse.__init__s4& )#    '=#$ 04"3  Jtc5\22 DJ % 4  DH'+!% 1 1. A A 011  B4: B.AADJJJ B B B Br-cr|jr|jsdS|j|jd|_dSr%)r.r/ _put_connr/s r*rzHTTPResponse.release_connUsBz !1 4 T-...r-cx |dS#ttttf$rYdSwxYw)z Read and discard any remaining HTTP response data in the response connection. Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. N)rrOSErrorr rr/s r*rzHTTPResponse.drain_conn\sC   IIKKKKK7L-@    DD s 99r"c\|jr|jS|jr|dSdS)NT)r)r*r+rr/s r*r!zHTTPResponse.datags9 : :  8 1994900 0tr-c|jSr%)r/r/s r*rzHTTPResponse.connectionrs r-c*t|jSr%)rr+r/s r*isclosedzHTTPResponse.isclosedvsDH%%%r-c|jS)z Obtain the number of bytes pulled over the wire so far. May differ from the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` if bytes are encoded on the wire (e.g, compressed). )r-r/s r*tellzHTTPResponse.tellys ""r-rc|jd}||jrtddS d|dD}t |dkrtd|z|}|dkrd}n#t$rd}YnwxYwd} t|j }n#t$rd}YnwxYw|d vsd |cxkrd ksn|d krd}|S) zM Set initial length value for Response content if available. zcontent-lengthNzReceived response with both Content-Length and Transfer-Encoding set. This is expressly forbidden by RFC 7230 sec 3.3.2. Ignoring Content-Length and attempting to process response as Transfer-Encoding: chunked.c,h|]}t|Sr6)r)rwvals r* z,HTTPResponse._init_length..sIII3s88IIIr-rzr z8Content-Length contained multiple unmatching values (%s)r)i0dHEAD) rrrlogwarningr{rrpoprrr)r)r%content_lengthlengthslengthrs r*r1zHTTPResponse._init_lengthsn &*\%5%56F%G%G  %|  t " JI~/C/CC/H/HIIIw<STTT- U U U U U U U U U U U U U U U0  3   3t99 ,  $0%%T2%% sA0B00B47B4Frrc4|||j}|4d}t|j|kr|j|S||}d}|d}n |dkr|sd}|st|jdkr|S|!||||}|r||_n|s|jrtd|S||||}|j |t|j|krb|r`||}||||}|j |t|j|kr|`|j|}|S)aT Similar to :meth:`http.client.HTTPResponse.read`, but with two additional parameters: ``decode_content`` and ``cache_content``. :param amt: How much of the content to read. If specified, caching is skipped because it doesn't make sense to cache partial content as the full response. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param cache_content: If True, will save the returned data such that the same result is returned despite of the state of the underlying file object. This is useful if you want the ``.data`` property to continue working after having ``.read()`` the file object. (Overridden if ``amt`` is set.) NFTrr) rrrr3rrbrr*rrr)r)rrrr!r decoded_datas r*rzHTTPResponse.readKs4   !!0N ?!M4'((C//+//444~~c"" ; MM AXXdX M D011Q66K ;<<nmDDD "! " ,&@ <<nmLLL  $ $\ 2 2 2d*++c11d1~~c**#||D.-PP $((666 d*++c11d1'++C00D r-r#typing.Generator[bytes, None, None]c#lK|jr3|r|||Ed{VdSt|jrt |jdkrK|||}|r|Vt|j1t |jdkIdSdS)a_ A generator wrapper for the read() method. A call will block until ``amt`` bytes have been read from the connection or until the connection is closed. :param amt: How much of the content to read. The generator will return up to much data per iteration, but may return less. This is particularly likely when using compressed data. 
However, the empty string will never be returned. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. r(Nr)rr)rsupports_chunked_readsrrr+rr3r)r)rrr!s r*rzHTTPResponse.streams$ < D7799 ((^(LL L L L L L L L L L"48,, D4H0I0IA0M0MyySyHHJJJ #48,, D4H0I0IA0M0M0M0M0M0Mr-cdS)NTr6r/s r*readablezHTTPResponse.readablestr-c|js |jr|j|jr|j|js!t j|dSdSr%)r_r+rr/r&rIOBaser/s r*rzHTTPResponse.closesu{ tx  HNN      %   " " $ $ $ " IOOD ! ! ! ! ! " "r-c |js$tjj|S|jdSt |jdr|jSt |jdr |jjSdS)NTr<r_)r&rrkr___get__r+r`r<r/s r*r_zHTTPResponse.closeds} 9#++D11 1 X 4 TXz * * 8$$&& & TXx ( ( 8? "4r-c|jtdt|jdr|jStd)Nz-HTTPResponse has no file to get a fileno fromfilenozOThe file-like object this HTTPResponse is wrapped around has no file descriptor)r+r8r`ror/s r*rozHTTPResponse.filenosV 8 IJJ J TXx ( ( 8??$$ $0 r-c|jDt|jdr1t|jdds|jSdSdSdS)Nr0r_F)r+r`r`r0r/s r*r0zHTTPResponse.flushs\ H '** !DHh66 !8>>## # ! r-c,t|jdS)a Checks if the underlying file-like object looks like a :class:`http.client.HTTPResponse` object. We do this by testing for the fp attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). fp)r`r+r/s r*rgz#HTTPResponse.supports_chunked_readsstx&&&r-c|jdS|jj}|ddd} t |d|_dS#t $r&|t||dwxYw)N;r rrS) r0r+rrreadliner{rrrr)r)lines r*_update_chunk_lengthz!HTTPResponse._update_chunk_lengths ? &4x{##%%zz$""1% ;!$mmDOOO ; ; ; JJLLL$T400d : ;s A0B c"d}|C|j|j}|}|jdd|_n|j7||jkr,|j|}|j|z |_|}n||jkr>|j|}|jdd|_|}n@|j|j}|jdd|_|S)NrL)r+ _safe_readr0)r)rreturned_chunkrvalues r* _handle_chunkzHTTPResponse._handle_chunks ;H''88E"N H   " " ""DOO _ (S4?-B-BH'',,E"o3DO"NN DO # #H'',,E H   " " ""DO"NN!X00AAN H   " " ""DOr-c#DK||jstd|st d|5|jr;t|jr'|j ddddS|j j  ddddS | |j dkrn4| |}|||d}|r|VT|r|}|r|V|j /|j j }|sn|dkrn|j /|jr|jddddS#1swxYwYdS) a Similar to :meth:`HTTPResponse.read`, but with an additional parameter: ``decode_content``. :param amt: How much of the content to read. If specified, caching is skipped because it doesn't make sense to cache partial content as the full response. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. zHResponse is not chunked. Header 'transfer-encoding: chunked' is missing.zkBody should be http.client.HTTPResponse like. It should have have an fp attribute which returns raw chunks.NTrF)rrs )rrrrgrrQr,rrr+rrrwr0r|rrru)r)rrrdecodedrvs r*rzHTTPResponse.read_chunked sy | $B **,, *P   " ") 0) 0& +>t?V+W+W '--/// ) 0) 0) 0) 0) 0) 0) 0) 0x{") 0) 0) 0) 0) 0) 0) 0) 0 "))+++?a''**3//,,.'"!MMM " "--//"!MMM(&x{++--7?? (&& 0'--///S) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0) 0s$6F' FCFFFc|jS)z Returns the URL that was the source of this response. If the request that generated this response redirected, this method will return the final redirect location. rr/s r*rzHTTPResponse.urlTs   r-rrrc||_dSr%rrs r*rzHTTPResponse.url]sr-rc#jKg}|dD]|}d|vra|d}d||dzdzV|ddD] }|dzV |dr |dg}dg}g||}|rd|VdSdS)NTr( r-rr r)rr{rlrk)r)rrchunksxs r*__iter__zHTTPResponse.__iter__as [[[55 % %E~~U++hhv&&2U::::"$$Ae)OOOO": $Rj\FFFF e$$$$  #((6"" " " " " " # #r-)rNrrNTTNNNNNTNNT)"rr rrrrrrrrrrrrrrr r!rrr"r#rrr$rr%rrrr&rr#r9rHr2r)r#rr)r%rr#r)r#rMr%)rrr#r"rrr)rrrrr#rerr)rrrr#r9)r#r) r3r4r5rr@rrrr!rr<r>r1rrQr]rbrrrirr_ror0rgrwr|rrrr __classcell__)r4s@r*rrsDRV! $#9=*.,0*. $'+%)"&#:B:B:B:B:B:B:Bx        X   X &&&&####6666p5$5$5$^5$n.N.N.N.N.Nd(((((X&*# IIIIIXFJ8""""   X     $$$$'''' ; ; ; ;.EIF0F0F0F0F0P!!!X! 
Z   Z ########r-r)rrrr#r )Q __future__rrrrrloggingrerWtypingrr= contextlibr http.clientr_HttplibHTTPMessager_HttplibHTTPResponsesocketrrO brotlicffir^ ImportError zstandardrgtuplemaprsearch __version__groups _zstd_versionAttributeErrorrrr _base_connectionr _collectionsr rr rr exceptionsrrrrrrrrrr util.responserr util.retryr TYPE_CHECKINGtyping_extensionsrconnectionpoolr getLoggerr3rGr r8rKrQr\rerprurrkrr6r-r*rs""""""   %%%%%%::::::<<<<<<++++++#####   FFF %*E C143CDDKKMMNN%%MMw Z0 DDD((((((((((((CCCCCCCCCC                        =<<<<<<< 3))))))222222g!!$$$$$$$$!!!!!^!!!@ !!!!!.!!!< "n*>*     88888888v[[[[[ry[[[|x #x #x #x #x ##x #x #x #x #x #sCA A AAAAA$#A$(A B22 B?>B?PKQZ_base_connection.pynu[from __future__ import annotations import typing from .util.connection import _TYPE_SOCKET_OPTIONS from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT from .util.url import Url _TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str] class ProxyConfig(typing.NamedTuple): ssl_context: ssl.SSLContext | None use_forwarding_for_https: bool assert_hostname: None | str | Literal[False] assert_fingerprint: str | None class _ResponseOptions(typing.NamedTuple): # TODO: Remove this in favor of a better # HTTP request/response lifecycle tracking. request_method: str request_url: str preload_content: bool decode_content: bool enforce_content_length: bool if typing.TYPE_CHECKING: import ssl from typing_extensions import Literal, Protocol from .response import BaseHTTPResponse class BaseHTTPConnection(Protocol): default_port: typing.ClassVar[int] default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] host: str port: int timeout: None | ( float ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved. blocksize: int source_address: tuple[str, int] | None socket_options: _TYPE_SOCKET_OPTIONS | None proxy: Url | None proxy_config: ProxyConfig | None is_verified: bool proxy_is_verified: bool | None def __init__( self, host: str, port: int | None = None, *, timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 8192, socket_options: _TYPE_SOCKET_OPTIONS | None = ..., proxy: Url | None = None, proxy_config: ProxyConfig | None = None, ) -> None: ... def set_tunnel( self, host: str, port: int | None = None, headers: typing.Mapping[str, str] | None = None, scheme: str = "http", ) -> None: ... def connect(self) -> None: ... def request( self, method: str, url: str, body: _TYPE_BODY | None = None, headers: typing.Mapping[str, str] | None = None, # We know *at least* botocore is depending on the order of the # first 3 parameters so to be safe we only mark the later ones # as keyword-only to ensure we have space to extend. *, chunked: bool = False, preload_content: bool = True, decode_content: bool = True, enforce_content_length: bool = True, ) -> None: ... def getresponse(self) -> BaseHTTPResponse: ... def close(self) -> None: ... @property def is_closed(self) -> bool: """Whether the connection either is brand new or has been previously closed. If this property is True then both ``is_connected`` and ``has_connected_to_proxy`` properties must be False. """ @property def is_connected(self) -> bool: """Whether the connection is actively connected to any origin (proxy or target)""" @property def has_connected_to_proxy(self) -> bool: """Whether the connection has successfully connected to its proxy. This returns False if no proxy is in use. 

            Used to determine whether errors are coming from the proxy layer or
            from tunnelling to the target origin.
            """

    class BaseHTTPSConnection(BaseHTTPConnection, Protocol):
        default_port: typing.ClassVar[int]
        default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS]

        # Certificate verification methods
        cert_reqs: int | str | None
        assert_hostname: None | str | Literal[False]
        assert_fingerprint: str | None
        ssl_context: ssl.SSLContext | None

        # Trusted CAs
        ca_certs: str | None
        ca_cert_dir: str | None
        ca_cert_data: None | str | bytes

        # TLS version
        ssl_minimum_version: int | None
        ssl_maximum_version: int | None
        ssl_version: int | str | None  # Deprecated

        # Client certificates
        cert_file: str | None
        key_file: str | None
        key_password: str | None

        def __init__(
            self,
            host: str,
            port: int | None = None,
            *,
            timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
            source_address: tuple[str, int] | None = None,
            blocksize: int = 8192,
            socket_options: _TYPE_SOCKET_OPTIONS | None = ...,
            proxy: Url | None = None,
            proxy_config: ProxyConfig | None = None,
            cert_reqs: int | str | None = None,
            assert_hostname: None | str | Literal[False] = None,
            assert_fingerprint: str | None = None,
            server_hostname: str | None = None,
            ssl_context: ssl.SSLContext | None = None,
            ca_certs: str | None = None,
            ca_cert_dir: str | None = None,
            ca_cert_data: None | str | bytes = None,
            ssl_minimum_version: int | None = None,
            ssl_maximum_version: int | None = None,
            ssl_version: int | str | None = None,  # Deprecated
            cert_file: str | None = None,
            key_file: str | None = None,
            key_password: str | None = None,
        ) -> None:
            ...
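
[Illustrative sketch, not part of the archive: one way the BaseHTTPConnection protocol above is meant to be consumed. The helper function below is invented for the example.]

from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    from urllib3._base_connection import BaseHTTPConnection


def read_root(conn: BaseHTTPConnection) -> bytes:
    # Structural typing: any object exposing the protocol's attributes and
    # methods type-checks here; no urllib3 base class is required.
    conn.request("GET", "/")
    response = conn.getresponse()
    return response.data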

[file: _collections.py]

from __future__ import annotations

import typing
from collections import OrderedDict
from enum import Enum, auto
from threading import RLock

if typing.TYPE_CHECKING:
    # We can only import Protocol if TYPE_CHECKING because it's a development
    # dependency, and is not available at runtime.
    from typing_extensions import Protocol

    class HasGettableStringKeys(Protocol):
        def keys(self) -> typing.Iterator[str]:
            ...

        def __getitem__(self, key: str) -> str:
            ...


__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]


# Key type
_KT = typing.TypeVar("_KT")
# Value type
_VT = typing.TypeVar("_VT")
# Default type
_DT = typing.TypeVar("_DT")


ValidHTTPHeaderSource = typing.Union[
    "HTTPHeaderDict",
    typing.Mapping[str, str],
    typing.Iterable[typing.Tuple[str, str]],
    "HasGettableStringKeys",
]


class _Sentinel(Enum):
    not_passed = auto()


def ensure_can_construct_http_header_dict(
    potential: object,
) -> ValidHTTPHeaderSource | None:
    if isinstance(potential, HTTPHeaderDict):
        return potential
    elif isinstance(potential, typing.Mapping):
        # Full runtime checking of the contents of a Mapping is expensive, so for the
        # purposes of typechecking, we assume that any Mapping is the right shape.
        return typing.cast(typing.Mapping[str, str], potential)
    elif isinstance(potential, typing.Iterable):
        # Similarly to Mapping, full runtime checking of the contents of an Iterable is
        # expensive, so for the purposes of typechecking, we assume that any Iterable
        # is the right shape.
        return typing.cast(typing.Iterable[typing.Tuple[str, str]], potential)
    elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"):
        return typing.cast("HasGettableStringKeys", potential)
    else:
        return None


class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]):
    """
    Provides a thread-safe dict-like container which maintains up to
    ``maxsize`` keys while throwing away the least-recently-used keys beyond
    ``maxsize``.

    :param maxsize:
        Maximum number of recent elements to retain.

    :param dispose_func:
        Callback invoked every time an item is evicted from the container:
        ``dispose_func(value)`` is called with the evicted value.
    """

    _container: typing.OrderedDict[_KT, _VT]
    _maxsize: int
    dispose_func: typing.Callable[[_VT], None] | None
    lock: RLock

    def __init__(
        self,
        maxsize: int = 10,
        dispose_func: typing.Callable[[_VT], None] | None = None,
    ) -> None:
        super().__init__()
        self._maxsize = maxsize
        self.dispose_func = dispose_func
        self._container = OrderedDict()
        self.lock = RLock()

    def __getitem__(self, key: _KT) -> _VT:
        # Re-insert the item, moving it to the end of the eviction line.
        with self.lock:
            item = self._container.pop(key)
            self._container[key] = item
            return item

    def __setitem__(self, key: _KT, value: _VT) -> None:
        evicted_item = None
        with self.lock:
            # Possibly evict the existing value of 'key'
            try:
                # If the key exists, we'll overwrite it, which won't change the
                # size of the pool. Because accessing a key should move it to
                # the end of the eviction line, we pop it out first.
                evicted_item = key, self._container.pop(key)
                self._container[key] = value
            except KeyError:
                # When the key does not exist, we insert the value first so that
                # evicting works in all cases, including when self._maxsize is 0
                self._container[key] = value
                if len(self._container) > self._maxsize:
                    # If we didn't evict an existing value, and we've hit our maximum
                    # size, then we have to evict the least recently used item from
                    # the beginning of the container.
                    evicted_item = self._container.popitem(last=False)

        # After releasing the lock on the pool, dispose of any evicted value.
        if evicted_item is not None and self.dispose_func:
            _, evicted_value = evicted_item
            self.dispose_func(evicted_value)

    def __delitem__(self, key: _KT) -> None:
        with self.lock:
            value = self._container.pop(key)
        if self.dispose_func:
            self.dispose_func(value)

    def __len__(self) -> int:
        with self.lock:
            return len(self._container)

    def __iter__(self) -> typing.NoReturn:
        raise NotImplementedError(
            "Iteration over this class is unlikely to be threadsafe."
        )

    def clear(self) -> None:
        with self.lock:
            # Copy pointers to all values, then wipe the mapping
            values = list(self._container.values())
            self._container.clear()

        if self.dispose_func:
            for value in values:
                self.dispose_func(value)

    def keys(self) -> set[_KT]:  # type: ignore[override]
        with self.lock:
            return set(self._container.keys())
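
[Illustrative sketch, not part of the archive: the eviction and dispose_func behaviour documented by RecentlyUsedContainer above. All variable names are local to the example.]

from urllib3._collections import RecentlyUsedContainer

disposed = []
lru = RecentlyUsedContainer(maxsize=2, dispose_func=disposed.append)
lru["a"] = 1
lru["b"] = 2
lru["a"]      # touching "a" moves it to the end of the eviction line
lru["c"] = 3  # exceeds maxsize, so the least-recently-used entry ("b") is evicted
assert disposed == [2]
assert lru.keys() == {"a", "c"}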
""" _headers: HTTPHeaderDict def __init__(self, headers: HTTPHeaderDict) -> None: self._headers = headers def __len__(self) -> int: return len(list(self._headers.iteritems())) def __iter__(self) -> typing.Iterator[tuple[str, str]]: return self._headers.iteritems() def __contains__(self, item: object) -> bool: if isinstance(item, tuple) and len(item) == 2: passed_key, passed_val = item if isinstance(passed_key, str) and isinstance(passed_val, str): return self._headers._has_value_for_header(passed_key, passed_val) return False class HTTPHeaderDict(typing.MutableMapping[str, str]): """ :param headers: An iterable of field-value pairs. Must not contain multiple field names when compared case-insensitively. :param kwargs: Additional field-value pairs to pass in to ``dict.update``. A ``dict`` like container for storing HTTP Headers. Field names are stored and compared case-insensitively in compliance with RFC 7230. Iteration provides the first case-sensitive key seen for each case-insensitive pair. Using ``__setitem__`` syntax overwrites fields that compare equal case-insensitively in order to maintain ``dict``'s api. For fields that compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` in a loop. If multiple fields that are equal case-insensitively are passed to the constructor or ``.update``, the behavior is undefined and some will be lost. >>> headers = HTTPHeaderDict() >>> headers.add('Set-Cookie', 'foo=bar') >>> headers.add('set-cookie', 'baz=quxx') >>> headers['content-length'] = '7' >>> headers['SET-cookie'] 'foo=bar, baz=quxx' >>> headers['Content-Length'] '7' """ _container: typing.MutableMapping[str, list[str]] def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str): super().__init__() self._container = {} # 'dict' is insert-ordered in Python 3.7+ if headers is not None: if isinstance(headers, HTTPHeaderDict): self._copy_from(headers) else: self.extend(headers) if kwargs: self.extend(kwargs) def __setitem__(self, key: str, val: str) -> None: # avoid a bytes/str comparison by decoding before httplib if isinstance(key, bytes): key = key.decode("latin-1") self._container[key.lower()] = [key, val] def __getitem__(self, key: str) -> str: val = self._container[key.lower()] return ", ".join(val[1:]) def __delitem__(self, key: str) -> None: del self._container[key.lower()] def __contains__(self, key: object) -> bool: if isinstance(key, str): return key.lower() in self._container return False def setdefault(self, key: str, default: str = "") -> str: return super().setdefault(key, default) def __eq__(self, other: object) -> bool: maybe_constructable = ensure_can_construct_http_header_dict(other) if maybe_constructable is None: return False else: other_as_http_header_dict = type(self)(maybe_constructable) return {k.lower(): v for k, v in self.itermerged()} == { k.lower(): v for k, v in other_as_http_header_dict.itermerged() } def __ne__(self, other: object) -> bool: return not self.__eq__(other) def __len__(self) -> int: return len(self._container) def __iter__(self) -> typing.Iterator[str]: # Only provide the originally cased names for vals in self._container.values(): yield vals[0] def discard(self, key: str) -> None: try: del self[key] except KeyError: pass def add(self, key: str, val: str, *, combine: bool = False) -> None: """Adds a (name, value) pair, doesn't overwrite the value if it already exists. 

class HTTPHeaderDict(typing.MutableMapping[str, str]):
    """
    :param headers:
        An iterable of field-value pairs. Must not contain multiple field names
        when compared case-insensitively.

    :param kwargs:
        Additional field-value pairs to pass in to ``dict.update``.

    A ``dict`` like container for storing HTTP Headers.

    Field names are stored and compared case-insensitively in compliance with
    RFC 7230. Iteration provides the first case-sensitive key seen for each
    case-insensitive pair.

    Using ``__setitem__`` syntax overwrites fields that compare equal
    case-insensitively in order to maintain ``dict``'s api. For fields that
    compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
    in a loop.

    If multiple fields that are equal case-insensitively are passed to the
    constructor or ``.update``, the behavior is undefined and some will be
    lost.

    >>> headers = HTTPHeaderDict()
    >>> headers.add('Set-Cookie', 'foo=bar')
    >>> headers.add('set-cookie', 'baz=quxx')
    >>> headers['content-length'] = '7'
    >>> headers['SET-cookie']
    'foo=bar, baz=quxx'
    >>> headers['Content-Length']
    '7'
    """

    _container: typing.MutableMapping[str, list[str]]

    def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str):
        super().__init__()
        self._container = {}  # 'dict' is insert-ordered in Python 3.7+
        if headers is not None:
            if isinstance(headers, HTTPHeaderDict):
                self._copy_from(headers)
            else:
                self.extend(headers)
        if kwargs:
            self.extend(kwargs)

    def __setitem__(self, key: str, val: str) -> None:
        # avoid a bytes/str comparison by decoding before httplib
        if isinstance(key, bytes):
            key = key.decode("latin-1")
        self._container[key.lower()] = [key, val]

    def __getitem__(self, key: str) -> str:
        val = self._container[key.lower()]
        return ", ".join(val[1:])

    def __delitem__(self, key: str) -> None:
        del self._container[key.lower()]

    def __contains__(self, key: object) -> bool:
        if isinstance(key, str):
            return key.lower() in self._container
        return False

    def setdefault(self, key: str, default: str = "") -> str:
        return super().setdefault(key, default)

    def __eq__(self, other: object) -> bool:
        maybe_constructable = ensure_can_construct_http_header_dict(other)
        if maybe_constructable is None:
            return False
        else:
            other_as_http_header_dict = type(self)(maybe_constructable)

        return {k.lower(): v for k, v in self.itermerged()} == {
            k.lower(): v for k, v in other_as_http_header_dict.itermerged()
        }

    def __ne__(self, other: object) -> bool:
        return not self.__eq__(other)

    def __len__(self) -> int:
        return len(self._container)

    def __iter__(self) -> typing.Iterator[str]:
        # Only provide the originally cased names
        for vals in self._container.values():
            yield vals[0]

    def discard(self, key: str) -> None:
        try:
            del self[key]
        except KeyError:
            pass

    def add(self, key: str, val: str, *, combine: bool = False) -> None:
        """Adds a (name, value) pair, doesn't overwrite the value if it already
        exists.

        If this is called with combine=True, instead of adding a new header value
        as a distinct item during iteration, this will instead append the value to
        any existing header value with a comma. If no existing header value exists
        for the key, then the value will simply be added, ignoring the combine
        parameter.

        >>> headers = HTTPHeaderDict(foo='bar')
        >>> headers.add('Foo', 'baz')
        >>> headers['foo']
        'bar, baz'
        >>> list(headers.items())
        [('foo', 'bar'), ('foo', 'baz')]
        >>> headers.add('foo', 'quz', combine=True)
        >>> list(headers.items())
        [('foo', 'bar, baz, quz')]
        """
        # avoid a bytes/str comparison by decoding before httplib
        if isinstance(key, bytes):
            key = key.decode("latin-1")
        key_lower = key.lower()
        new_vals = [key, val]
        # Keep the common case aka no item present as fast as possible
        vals = self._container.setdefault(key_lower, new_vals)
        if new_vals is not vals:
            # if there are values here, then there is at least the initial
            # key/value pair
            assert len(vals) >= 2
            if combine:
                vals[-1] = vals[-1] + ", " + val
            else:
                vals.append(val)

    def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None:
        """Generic import function for any type of header-like object.
        Adapted version of MutableMapping.update in order to insert items
        with self.add instead of self.__setitem__
        """
        if len(args) > 1:
            raise TypeError(
                f"extend() takes at most 1 positional arguments ({len(args)} given)"
            )
        other = args[0] if len(args) >= 1 else ()

        if isinstance(other, HTTPHeaderDict):
            for key, val in other.iteritems():
                self.add(key, val)
        elif isinstance(other, typing.Mapping):
            for key, val in other.items():
                self.add(key, val)
        elif isinstance(other, typing.Iterable):
            other = typing.cast(typing.Iterable[typing.Tuple[str, str]], other)
            for key, value in other:
                self.add(key, value)
        elif hasattr(other, "keys") and hasattr(other, "__getitem__"):
            # THIS IS NOT A TYPESAFE BRANCH
            # In this branch, the object has a `keys` attr but is not a Mapping or any of
            # the other types indicated in the method signature. We do some stuff with
            # it as though it partially implements the Mapping interface, but we're not
            # doing that stuff safely AT ALL.
            for key in other.keys():
                self.add(key, other[key])

        for key, value in kwargs.items():
            self.add(key, value)

    @typing.overload
    def getlist(self, key: str) -> list[str]:
        ...

    @typing.overload
    def getlist(self, key: str, default: _DT) -> list[str] | _DT:
        ...

    def getlist(
        self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed
    ) -> list[str] | _DT:
        """Returns a list of all the values for the named field.
        Returns an empty list if the key doesn't exist."""
        try:
            vals = self._container[key.lower()]
        except KeyError:
            if default is _Sentinel.not_passed:
                # _DT is unbound; empty list is instance of List[str]
                return []
            # _DT is bound; default is instance of _DT
            return default
        else:
            # _DT may or may not be bound; vals[1:] is instance of List[str], which
            # meets our external interface requirement of `Union[List[str], _DT]`.
            return vals[1:]

    # Backwards compatibility for httplib
    getheaders = getlist
    getallmatchingheaders = getlist
    iget = getlist

    # Backwards compatibility for http.cookiejar
    get_all = getlist

    def __repr__(self) -> str:
        return f"{type(self).__name__}({dict(self.itermerged())})"

    def _copy_from(self, other: HTTPHeaderDict) -> None:
        for key in other:
            val = other.getlist(key)
            self._container[key.lower()] = [key, *val]

    def copy(self) -> HTTPHeaderDict:
        clone = type(self)()
        clone._copy_from(self)
        return clone

    def iteritems(self) -> typing.Iterator[tuple[str, str]]:
        """Iterate over all header lines, including duplicate ones."""
        for key in self:
            vals = self._container[key.lower()]
            for val in vals[1:]:
                yield vals[0], val

    def itermerged(self) -> typing.Iterator[tuple[str, str]]:
        """Iterate over all headers, merging duplicate ones together."""
        for key in self:
            val = self._container[key.lower()]
            yield val[0], ", ".join(val[1:])

    def items(self) -> HTTPHeaderDictItemView:  # type: ignore[override]
        return HTTPHeaderDictItemView(self)

    def _has_value_for_header(self, header_name: str, potential_value: str) -> bool:
        if header_name in self:
            return potential_value in self._container[header_name.lower()][1:]
        return False

    def __ior__(self, other: object) -> HTTPHeaderDict:
        # Supports extending a header dict in-place using operator |=
        # combining items with add instead of __setitem__
        maybe_constructable = ensure_can_construct_http_header_dict(other)
        if maybe_constructable is None:
            return NotImplemented
        self.extend(maybe_constructable)
        return self

    def __or__(self, other: object) -> HTTPHeaderDict:
        # Supports merging header dicts using operator |
        # combining items with add instead of __setitem__
        maybe_constructable = ensure_can_construct_http_header_dict(other)
        if maybe_constructable is None:
            return NotImplemented
        result = self.copy()
        result.extend(maybe_constructable)
        return result

    def __ror__(self, other: object) -> HTTPHeaderDict:
        # Supports merging header dicts using operator | when other is on left side
        # combining items with add instead of __setitem__
        maybe_constructable = ensure_can_construct_http_header_dict(other)
        if maybe_constructable is None:
            return NotImplemented
        result = type(self)(maybe_constructable)
        result.extend(self)
        return result
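
[Illustrative sketch, not part of the archive: the `|` and `|=` merging implemented by __or__/__ior__/__ror__ above, which combine duplicate names with add() rather than overwriting.]

from urllib3._collections import HTTPHeaderDict

base = HTTPHeaderDict({"Accept": "text/html"})
base |= {"Accept": "application/json"}  # extends in place via add()
assert base["Accept"] == "text/html, application/json"

merged = base | {"User-Agent": "example/1.0"}  # returns a new HTTPHeaderDict
assert "User-Agent" in merged and "User-Agent" not in base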
    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}

    def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None:
        self.headers = headers or {}

    def urlopen(
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        encode_multipart: bool = True,
        multipart_boundary: str | None = None,
        **kw: typing.Any,
    ) -> BaseHTTPResponse:  # Abstract
        raise NotImplementedError(
            "Classes extending RequestMethods must implement "
            "their own ``urlopen`` method."
        )

    def request(
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        fields: _TYPE_FIELDS | None = None,
        headers: typing.Mapping[str, str] | None = None,
        json: typing.Any | None = None,
        **urlopen_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        if json is not None and body is not None:
            raise TypeError(
                "request got values for both 'body' and 'json' parameters which are mutually exclusive"
            )

        if json is not None:
            if headers is None:
                headers = self.headers.copy()  # type: ignore
            if not ("content-type" in map(str.lower, headers.keys())):
                headers["Content-Type"] = "application/json"  # type: ignore
            body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode(
                "utf-8"
            )

        if body is not None:
            urlopen_kw["body"] = body

        if method in self._encode_url_methods:
            return self.request_encode_url(
                method,
                url,
                fields=fields,  # type: ignore[arg-type]
                headers=headers,
                **urlopen_kw,
            )
        else:
            return self.request_encode_body(
                method, url, fields=fields, headers=headers, **urlopen_kw
            )

    def request_encode_url(
        self,
        method: str,
        url: str,
        fields: _TYPE_ENCODE_URL_FIELDS | None = None,
        headers: typing.Mapping[str, str] | None = None,
        **urlopen_kw: str,
    ) -> BaseHTTPResponse:
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw: dict[str, typing.Any] = {"headers": headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += "?" + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(
        self,
        method: str,
        url: str,
        fields: _TYPE_FIELDS | None = None,
        headers: typing.Mapping[str, str] | None = None,
        encode_multipart: bool = True,
        multipart_boundary: str | None = None,
        **urlopen_kw: str,
    ) -> BaseHTTPResponse:
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :func:`urllib3.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :func:`urllib.parse.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's
        reasonably safe to use it in other times too. However, it may break
        request signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple.
        A filetuple is a (filename, data, MIME type) tuple where the MIME type
        is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimic behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)}
        body: bytes | str

        if fields:
            if "body" in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one."
                )

            if encode_multipart:
                body, content_type = encode_multipart_formdata(
                    fields, boundary=multipart_boundary
                )
            else:
                body, content_type = (
                    urlencode(fields),  # type: ignore[arg-type]
                    "application/x-www-form-urlencoded",
                )

            extra_kw["body"] = body
            extra_kw["headers"].setdefault("Content-Type", content_type)

        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)


# ---------------------------------------------------------------------------
# _version.py
# ---------------------------------------------------------------------------

# This file is protected via CODEOWNERS
from __future__ import annotations

__version__ = "2.0.4"


# ---------------------------------------------------------------------------
# connection.py
# ---------------------------------------------------------------------------

from __future__ import annotations

import datetime
import logging
import os
import re
import socket
import sys
import typing
import warnings
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException  # noqa: F401
from http.client import ResponseNotReady
from socket import timeout as SocketTimeout

if typing.TYPE_CHECKING:
    from typing_extensions import Literal

    from .response import HTTPResponse
    from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
    from .util.ssltransport import SSLTransport

from ._collections import HTTPHeaderDict
from .util.response import assert_header_parsing
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_str
from .util.wait import wait_for_read

try:  # Compiled with SSL?
    import ssl

    BaseSSLError = ssl.SSLError
except (ImportError, AttributeError):
    ssl = None  # type: ignore[assignment]

    class BaseSSLError(BaseException):  # type: ignore[no-redef]
        pass

from ._base_connection import _TYPE_BODY
from ._base_connection import ProxyConfig as ProxyConfig
from ._base_connection import _ResponseOptions as _ResponseOptions
from ._version import __version__
from .exceptions import (
    ConnectTimeoutError,
    HeaderParsingError,
    NameResolutionError,
    NewConnectionError,
    ProxyError,
    SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.request import body_to_chunks
from .util.ssl_ import assert_fingerprint as _assert_fingerprint
from .util.ssl_ import (
    create_urllib3_context,
    is_ipaddress,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
from .util.url import Url

# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError BrokenPipeError = BrokenPipeError log = logging.getLogger(__name__) port_by_scheme = {"http": 80, "https": 443} # When it comes time to update this value as a part of regular maintenance # (ie test_recent_date is failing) update it to ~6 months before the current date. RECENT_DATE = datetime.date(2022, 1, 1) _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") _HAS_SYS_AUDIT = hasattr(sys, "audit") class HTTPConnection(_HTTPConnection): """ Based on :class:`http.client.HTTPConnection` but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - ``source_address``: Set the source address for the current connection. - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. For example, if you wish to enable TCP Keep Alive in addition to the defaults, you might pass: .. code-block:: python HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ] Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [ (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) ] #: Whether this connection verifies the host's certificate. is_verified: bool = False #: Whether this proxy connection verified the proxy host's certificate. # If no proxy is currently connected to the value will be ``None``. proxy_is_verified: bool | None = None blocksize: int source_address: tuple[str, int] | None socket_options: connection._TYPE_SOCKET_OPTIONS | None _has_connected_to_proxy: bool _response_options: _ResponseOptions | None _tunnel_host: str | None _tunnel_port: int | None _tunnel_scheme: str | None def __init__( self, host: str, port: int | None = None, *, timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 8192, socket_options: None | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options, proxy: Url | None = None, proxy_config: ProxyConfig | None = None, ) -> None: super().__init__( host=host, port=port, timeout=Timeout.resolve_default_timeout(timeout), source_address=source_address, blocksize=blocksize, ) self.socket_options = socket_options self.proxy = proxy self.proxy_config = proxy_config self._has_connected_to_proxy = False self._response_options = None self._tunnel_host: str | None = None self._tunnel_port: int | None = None self._tunnel_scheme: str | None = None # https://github.com/python/mypy/issues/4125 # Mypy treats this as LSP violation, which is considered a bug. # If `host` is made a property it violates LSP, because a writeable attribute is overridden with a read-only one. # However, there is also a `host` setter so LSP is not violated. # Potentially, a `@host.deleter` might be needed depending on how this issue will be fixed. @property def host(self) -> str: """ Getter method to remove any trailing dots that indicate the hostname is an FQDN. 
In general, SSL certificates don't include the trailing dot indicating a fully-qualified domain name, and thus, they don't validate properly when checked against a domain name that includes the dot. In addition, some servers may not expect to receive the trailing dot when provided. However, the hostname with trailing dot is critical to DNS resolution; doing a lookup with the trailing dot will properly only resolve the appropriate FQDN, whereas a lookup without a trailing dot will search the system's search domain list. Thus, it's important to keep the original host around for use only in those cases where it's appropriate (i.e., when doing DNS lookup to establish the actual TCP connection across which we're going to send HTTP requests). """ return self._dns_host.rstrip(".") @host.setter def host(self, value: str) -> None: """ Setter for the `host` property. We assume that only urllib3 uses the _dns_host attribute; httplib itself only uses `host`, and it seems reasonable that other libraries follow suit. """ self._dns_host = value def _new_conn(self) -> socket.socket: """Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ try: sock = connection.create_connection( (self._dns_host, self.port), self.timeout, source_address=self.source_address, socket_options=self.socket_options, ) except socket.gaierror as e: raise NameResolutionError(self.host, self, e) from e except SocketTimeout as e: raise ConnectTimeoutError( self, f"Connection to {self.host} timed out. (connect timeout={self.timeout})", ) from e except OSError as e: raise NewConnectionError( self, f"Failed to establish a new connection: {e}" ) from e # Audit hooks are only available in Python 3.8+ if _HAS_SYS_AUDIT: sys.audit("http.client.connect", self, self.host, self.port) return sock def set_tunnel( self, host: str, port: int | None = None, headers: typing.Mapping[str, str] | None = None, scheme: str = "http", ) -> None: if scheme not in ("http", "https"): raise ValueError( f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'" ) super().set_tunnel(host, port=port, headers=headers) self._tunnel_scheme = scheme def connect(self) -> None: self.sock = self._new_conn() if self._tunnel_host: # If we're tunneling it means we're connected to our proxy. self._has_connected_to_proxy = True # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # type: ignore[attr-defined] # If there's a proxy to be connected to we are fully connected. # This is set twice (once above and here) due to forwarding proxies # not using tunnelling. self._has_connected_to_proxy = bool(self.proxy) @property def is_closed(self) -> bool: return self.sock is None @property def is_connected(self) -> bool: if self.sock is None: return False return not wait_for_read(self.sock, timeout=0.0) @property def has_connected_to_proxy(self) -> bool: return self._has_connected_to_proxy def close(self) -> None: try: super().close() finally: # Reset all stateful properties so connection # can be re-used without leaking prior configs. 
self.sock = None self.is_verified = False self.proxy_is_verified = None self._has_connected_to_proxy = False self._response_options = None self._tunnel_host = None self._tunnel_port = None self._tunnel_scheme = None def putrequest( self, method: str, url: str, skip_host: bool = False, skip_accept_encoding: bool = False, ) -> None: """""" # Empty docstring because the indentation of CPython's implementation # is broken but we don't want this method in our documentation. match = _CONTAINS_CONTROL_CHAR_RE.search(method) if match: raise ValueError( f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})" ) return super().putrequest( method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding ) def putheader(self, header: str, *values: str) -> None: """""" if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): super().putheader(header, *values) elif to_str(header.lower()) not in SKIPPABLE_HEADERS: skippable_headers = "', '".join( [str.title(header) for header in sorted(SKIPPABLE_HEADERS)] ) raise ValueError( f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'" ) # `request` method's signature intentionally violates LSP. # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental. def request( # type: ignore[override] self, method: str, url: str, body: _TYPE_BODY | None = None, headers: typing.Mapping[str, str] | None = None, *, chunked: bool = False, preload_content: bool = True, decode_content: bool = True, enforce_content_length: bool = True, ) -> None: # Update the inner socket's timeout value to send the request. # This only triggers if the connection is re-used. if self.sock is not None: self.sock.settimeout(self.timeout) # Store these values to be fed into the HTTPResponse # object later. TODO: Remove this in favor of a real # HTTP lifecycle mechanism. # We have to store these before we call .request() # because sometimes we can still salvage a response # off the wire even if we aren't able to completely # send the request body. self._response_options = _ResponseOptions( request_method=method, request_url=url, preload_content=preload_content, decode_content=decode_content, enforce_content_length=enforce_content_length, ) if headers is None: headers = {} header_keys = frozenset(to_str(k.lower()) for k in headers) skip_accept_encoding = "accept-encoding" in header_keys skip_host = "host" in header_keys self.putrequest( method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) # Transform the body into an iterable of sendall()-able chunks # and detect if an explicit Content-Length is doable. chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize) chunks = chunks_and_cl.chunks content_length = chunks_and_cl.content_length # When chunked is explicit set to 'True' we respect that. if chunked: if "transfer-encoding" not in header_keys: self.putheader("Transfer-Encoding", "chunked") else: # Detect whether a framing mechanism is already in use. If so # we respect that value, otherwise we pick chunked vs content-length # depending on the type of 'body'. if "content-length" in header_keys: chunked = False elif "transfer-encoding" in header_keys: chunked = True # Otherwise we go off the recommendation of 'body_to_chunks()'. 
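            # (Illustrative: a bytes body comes back from body_to_chunks() with
            # a concrete content_length, while a generator body has
            # content_length None and therefore gets sent chunked.)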
else: chunked = False if content_length is None: if chunks is not None: chunked = True self.putheader("Transfer-Encoding", "chunked") else: self.putheader("Content-Length", str(content_length)) # Now that framing headers are out of the way we send all the other headers. if "user-agent" not in header_keys: self.putheader("User-Agent", _get_default_user_agent()) for header, value in headers.items(): self.putheader(header, value) self.endheaders() # If we're given a body we start sending that in chunks. if chunks is not None: for chunk in chunks: # Sending empty chunks isn't allowed for TE: chunked # as it indicates the end of the body. if not chunk: continue if isinstance(chunk, str): chunk = chunk.encode("utf-8") if chunked: self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk)) else: self.send(chunk) # Regardless of whether we have a body or not, if we're in # chunked mode we want to send an explicit empty chunk. if chunked: self.send(b"0\r\n\r\n") def request_chunked( self, method: str, url: str, body: _TYPE_BODY | None = None, headers: typing.Mapping[str, str] | None = None, ) -> None: """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ warnings.warn( "HTTPConnection.request_chunked() is deprecated and will be removed " "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).", category=DeprecationWarning, stacklevel=2, ) self.request(method, url, body=body, headers=headers, chunked=True) def getresponse( # type: ignore[override] self, ) -> HTTPResponse: """ Get the response from the server. If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable. If a request has not been sent or if a previous response has not be handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed. """ # Raise the same error as http.client.HTTPConnection if self._response_options is None: raise ResponseNotReady() # Reset this attribute for being used again. resp_options = self._response_options self._response_options = None # Since the connection's timeout value may have been updated # we need to set the timeout on the socket. self.sock.settimeout(self.timeout) # This is needed here to avoid circular import errors from .response import HTTPResponse # Get the response from http.client.HTTPConnection httplib_response = super().getresponse() try: assert_header_parsing(httplib_response.msg) except (HeaderParsingError, TypeError) as hpe: log.warning( "Failed to parse headers (url=%s): %s", _url_from_connection(self, resp_options.request_url), hpe, exc_info=True, ) headers = HTTPHeaderDict(httplib_response.msg.items()) response = HTTPResponse( body=httplib_response, headers=headers, status=httplib_response.status, version=httplib_response.version, reason=httplib_response.reason, preload_content=resp_options.preload_content, decode_content=resp_options.decode_content, original_response=httplib_response, enforce_content_length=resp_options.enforce_content_length, request_method=resp_options.request_method, request_url=resp_options.request_url, ) return response class HTTPSConnection(HTTPConnection): """ Many of the parameters to this constructor are passed to the underlying SSL socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. 
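
    An illustrative construction (hypothetical values)::

        conn = HTTPSConnection("example.com", 443, cert_reqs="CERT_REQUIRED")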
""" default_port = port_by_scheme["https"] # type: ignore[misc] cert_reqs: int | str | None = None ca_certs: str | None = None ca_cert_dir: str | None = None ca_cert_data: None | str | bytes = None ssl_version: int | str | None = None ssl_minimum_version: int | None = None ssl_maximum_version: int | None = None assert_fingerprint: str | None = None def __init__( self, host: str, port: int | None = None, *, timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, source_address: tuple[str, int] | None = None, blocksize: int = 8192, socket_options: None | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options, proxy: Url | None = None, proxy_config: ProxyConfig | None = None, cert_reqs: int | str | None = None, assert_hostname: None | str | Literal[False] = None, assert_fingerprint: str | None = None, server_hostname: str | None = None, ssl_context: ssl.SSLContext | None = None, ca_certs: str | None = None, ca_cert_dir: str | None = None, ca_cert_data: None | str | bytes = None, ssl_minimum_version: int | None = None, ssl_maximum_version: int | None = None, ssl_version: int | str | None = None, # Deprecated cert_file: str | None = None, key_file: str | None = None, key_password: str | None = None, ) -> None: super().__init__( host, port=port, timeout=timeout, source_address=source_address, blocksize=blocksize, socket_options=socket_options, proxy=proxy, proxy_config=proxy_config, ) self.key_file = key_file self.cert_file = cert_file self.key_password = key_password self.ssl_context = ssl_context self.server_hostname = server_hostname self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ssl_version = ssl_version self.ssl_minimum_version = ssl_minimum_version self.ssl_maximum_version = ssl_maximum_version self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) self.ca_cert_data = ca_cert_data # cert_reqs depends on ssl_context so calculate last. if cert_reqs is None: if self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode else: cert_reqs = resolve_cert_reqs(None) self.cert_reqs = cert_reqs def set_cert( self, key_file: str | None = None, cert_file: str | None = None, cert_reqs: int | str | None = None, key_password: str | None = None, ca_certs: str | None = None, assert_hostname: None | str | Literal[False] = None, assert_fingerprint: str | None = None, ca_cert_dir: str | None = None, ca_cert_data: None | str | bytes = None, ) -> None: """ This method should only be called once, before the connection is used. """ warnings.warn( "HTTPSConnection.set_cert() is deprecated and will be removed " "in urllib3 v2.1.0. Instead provide the parameters to the " "HTTPSConnection constructor.", category=DeprecationWarning, stacklevel=2, ) # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also # have an SSLContext object in which case we'll use its verify_mode. 
if cert_reqs is None: if self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode else: cert_reqs = resolve_cert_reqs(None) self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.key_password = key_password self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) self.ca_cert_data = ca_cert_data def connect(self) -> None: sock: socket.socket | ssl.SSLSocket self.sock = sock = self._new_conn() server_hostname: str = self.host tls_in_tls = False # Do we need to establish a tunnel? if self._tunnel_host is not None: # We're tunneling to an HTTPS origin so need to do TLS-in-TLS. if self._tunnel_scheme == "https": self.sock = sock = self._connect_tls_proxy(self.host, sock) tls_in_tls = True # If we're tunneling it means we're connected to our proxy. self._has_connected_to_proxy = True self._tunnel() # type: ignore[attr-defined] # Override the host with the one we're requesting data from. server_hostname = self._tunnel_host if self.server_hostname is not None: server_hostname = self.server_hostname is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn( ( f"System time is way off (before {RECENT_DATE}). This will probably " "lead to SSL verification errors" ), SystemTimeWarning, ) sock_and_verified = _ssl_wrap_socket_and_match_hostname( sock=sock, cert_reqs=self.cert_reqs, ssl_version=self.ssl_version, ssl_minimum_version=self.ssl_minimum_version, ssl_maximum_version=self.ssl_maximum_version, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, ca_cert_data=self.ca_cert_data, cert_file=self.cert_file, key_file=self.key_file, key_password=self.key_password, server_hostname=server_hostname, ssl_context=self.ssl_context, tls_in_tls=tls_in_tls, assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint, ) self.sock = sock_and_verified.socket self.is_verified = sock_and_verified.is_verified # If there's a proxy to be connected to we are fully connected. # This is set twice (once above and here) due to forwarding proxies # not using tunnelling. self._has_connected_to_proxy = bool(self.proxy) def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket: """ Establish a TLS connection to the proxy using the provided SSL context. """ # `_connect_tls_proxy` is called when self._tunnel_host is truthy. 
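        # (Illustrative TLS-in-TLS flow: the TCP socket is first wrapped in
        # TLS for the "https" proxy itself; the CONNECT tunnel and a second
        # TLS session for the origin are then layered on top of it.)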
proxy_config = typing.cast(ProxyConfig, self.proxy_config) ssl_context = proxy_config.ssl_context sock_and_verified = _ssl_wrap_socket_and_match_hostname( sock, cert_reqs=self.cert_reqs, ssl_version=self.ssl_version, ssl_minimum_version=self.ssl_minimum_version, ssl_maximum_version=self.ssl_maximum_version, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, ca_cert_data=self.ca_cert_data, server_hostname=hostname, ssl_context=ssl_context, assert_hostname=proxy_config.assert_hostname, assert_fingerprint=proxy_config.assert_fingerprint, # Features that aren't implemented for proxies yet: cert_file=None, key_file=None, key_password=None, tls_in_tls=False, ) self.proxy_is_verified = sock_and_verified.is_verified return sock_and_verified.socket # type: ignore[return-value] class _WrappedAndVerifiedSocket(typing.NamedTuple): """ Wrapped socket and whether the connection is verified after the TLS handshake """ socket: ssl.SSLSocket | SSLTransport is_verified: bool def _ssl_wrap_socket_and_match_hostname( sock: socket.socket, *, cert_reqs: None | str | int, ssl_version: None | str | int, ssl_minimum_version: int | None, ssl_maximum_version: int | None, cert_file: str | None, key_file: str | None, key_password: str | None, ca_certs: str | None, ca_cert_dir: str | None, ca_cert_data: None | str | bytes, assert_hostname: None | str | Literal[False], assert_fingerprint: str | None, server_hostname: str | None, ssl_context: ssl.SSLContext | None, tls_in_tls: bool = False, ) -> _WrappedAndVerifiedSocket: """Logic for constructing an SSLContext from all TLS parameters, passing that down into ssl_wrap_socket, and then doing certificate verification either via hostname or fingerprint. This function exists to guarantee that both proxies and targets have the same behavior when connecting via TLS. """ default_ssl_context = False if ssl_context is None: default_ssl_context = True context = create_urllib3_context( ssl_version=resolve_ssl_version(ssl_version), ssl_minimum_version=ssl_minimum_version, ssl_maximum_version=ssl_maximum_version, cert_reqs=resolve_cert_reqs(cert_reqs), ) else: context = ssl_context context.verify_mode = resolve_cert_reqs(cert_reqs) # In some cases, we want to verify hostnames ourselves if ( # `ssl` can't verify fingerprints or alternate hostnames assert_fingerprint or assert_hostname # assert_hostname can be set to False to disable hostname checking or assert_hostname is False # We still support OpenSSL 1.0.2, which prevents us from verifying # hostnames easily: https://github.com/pyca/pyopenssl/pull/933 or ssl_.IS_PYOPENSSL or not ssl_.HAS_NEVER_CHECK_COMMON_NAME ): context.check_hostname = False # Try to load OS default certs if none are given. # We need to do the hasattr() check for our custom # pyOpenSSL and SecureTransport SSLContext objects # because neither support load_default_certs(). if ( not ca_certs and not ca_cert_dir and not ca_cert_data and default_ssl_context and hasattr(context, "load_default_certs") ): context.load_default_certs() # Ensure that IPv6 addresses are in the proper format and don't have a # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses # and interprets them as DNS hostnames. 
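    # (Illustrative: "[fe80::1%eth0]" is stripped of its brackets and then of
    # the "%eth0" zone ID, leaving "fe80::1", which is_ipaddress() accepts.)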
if server_hostname is not None: normalized = server_hostname.strip("[]") if "%" in normalized: normalized = normalized[: normalized.rfind("%")] if is_ipaddress(normalized): server_hostname = normalized ssl_sock = ssl_wrap_socket( sock=sock, keyfile=key_file, certfile=cert_file, key_password=key_password, ca_certs=ca_certs, ca_cert_dir=ca_cert_dir, ca_cert_data=ca_cert_data, server_hostname=server_hostname, ssl_context=context, tls_in_tls=tls_in_tls, ) try: if assert_fingerprint: _assert_fingerprint( ssl_sock.getpeercert(binary_form=True), assert_fingerprint ) elif ( context.verify_mode != ssl.CERT_NONE and not context.check_hostname and assert_hostname is not False ): cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment] # Need to signal to our match_hostname whether to use 'commonName' or not. # If we're using our own constructed SSLContext we explicitly set 'False' # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name. if default_ssl_context: hostname_checks_common_name = False else: hostname_checks_common_name = ( getattr(context, "hostname_checks_common_name", False) or False ) _match_hostname( cert, assert_hostname or server_hostname, # type: ignore[arg-type] hostname_checks_common_name, ) return _WrappedAndVerifiedSocket( socket=ssl_sock, is_verified=context.verify_mode == ssl.CERT_REQUIRED or bool(assert_fingerprint), ) except BaseException: ssl_sock.close() raise def _match_hostname( cert: _TYPE_PEER_CERT_RET_DICT | None, asserted_hostname: str, hostname_checks_common_name: bool = False, ) -> None: # Our upstream implementation of ssl.match_hostname() # only applies this normalization to IP addresses so it doesn't # match DNS SANs so we do the same thing! stripped_hostname = asserted_hostname.strip("[]") if is_ipaddress(stripped_hostname): asserted_hostname = stripped_hostname try: match_hostname(cert, asserted_hostname, hostname_checks_common_name) except CertificateError as e: log.warning( "Certificate did not match expected hostname: %s. Certificate: %s", asserted_hostname, cert, ) # Add cert to exception and reraise so client code can inspect # the cert when catching the exception, if they want to e._peer_cert = cert # type: ignore[attr-defined] raise def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError: # Look for the phrase 'wrong version number', if found # then we should warn the user that we're very sure that # this proxy is HTTP-only and they have a configuration issue. error_normalized = " ".join(re.split("[^a-z]", str(err).lower())) is_likely_http_proxy = ( "wrong version number" in error_normalized or "unknown protocol" in error_normalized ) http_proxy_warning = ( ". Your proxy appears to only use HTTP and not HTTPS, " "try changing your proxy URL to be HTTP. See: " "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" "#https-proxy-error-http-proxy" ) new_err = ProxyError( f"Unable to connect to proxy" f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}", err, ) new_err.__cause__ = err return new_err def _get_default_user_agent() -> str: return f"python-urllib3/{__version__}" class DummyConnection: """Used to detect a failed ConnectionCls import.""" if not ssl: HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811 VerifiedHTTPSConnection = HTTPSConnection def _url_from_connection( conn: HTTPConnection | HTTPSConnection, path: str | None = None ) -> str: """Returns the URL from a given connection. 
    This is mainly used for testing and logging."""
    scheme = "https" if isinstance(conn, HTTPSConnection) else "http"
    return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url


# ---------------------------------------------------------------------------
# connectionpool.py
# ---------------------------------------------------------------------------

from __future__ import annotations

import errno
import logging
import queue
import sys
import typing
import warnings
import weakref
from socket import timeout as SocketTimeout
from types import TracebackType

from ._base_connection import _TYPE_BODY
from ._request_methods import RequestMethods
from .connection import (
    BaseSSLError,
    BrokenPipeError,
    DummyConnection,
    HTTPConnection,
    HTTPException,
    HTTPSConnection,
    ProxyConfig,
    _wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
    ClosedPoolError,
    EmptyPoolError,
    FullPoolError,
    HostChangedError,
    InsecureRequestWarning,
    LocationValueError,
    MaxRetryError,
    NewConnectionError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
)
from .response import BaseHTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import _TYPE_BODY_POSITION, set_file_position
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str

if typing.TYPE_CHECKING:
    import ssl

    from typing_extensions import Literal

    from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection

log = logging.getLogger(__name__)

_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]

_SelfT = typing.TypeVar("_SelfT")


# Pool objects
class ConnectionPool:
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
    """

    scheme: str | None = None
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

        # This property uses 'normalize_host()' (not '_normalize_host()')
        # to avoid removing square braces around IPv6 addresses.
        # This value is sent to `HTTPConnection.set_tunnel()` if called
        # because square braces are required for HTTP CONNECT tunneling.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})"

    def __enter__(self: _SelfT) -> _SelfT:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> Literal[False]:
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """


# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.
:param port: Port used for this HTTP Connection (None is equivalent to 80), passed into :class:`http.client.HTTPConnection`. :param timeout: Socket timeout in seconds for each individual connection. This can be a float or integer, which sets the timeout for the HTTP request, or an instance of :class:`urllib3.util.Timeout` which gives you more fine-grained control over request timeouts. After the constructor has been parsed, this is always a `urllib3.util.Timeout` object. :param maxsize: Number of connections to save that can be reused. More than 1 is useful in multithreaded situations. If ``block`` is set to False, more connections will be created but they will not be saved once they've been used. :param block: If set to True, no more than ``maxsize`` connections will be used at a time. When no free connections are available, the call will block until a connection has been released. This is a useful side effect for particular multithreaded situations where one does not want to use more than maxsize connections per host to prevent flooding. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param retries: Retry configuration to use by default with requests in this pool. :param _proxy: Parsed proxy URL, should not be used directly, instead, see :class:`urllib3.ProxyManager` :param _proxy_headers: A dictionary with proxy headers, should not be used directly, instead, see :class:`urllib3.ProxyManager` :param \\**conn_kw: Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, :class:`urllib3.connection.HTTPSConnection` instances. """ scheme = "http" ConnectionCls: ( type[BaseHTTPConnection] | type[BaseHTTPSConnection] ) = HTTPConnection def __init__( self, host: str, port: int | None = None, timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, maxsize: int = 1, block: bool = False, headers: typing.Mapping[str, str] | None = None, retries: Retry | bool | int | None = None, _proxy: Url | None = None, _proxy_headers: typing.Mapping[str, str] | None = None, _proxy_config: ProxyConfig | None = None, **conn_kw: typing.Any, ): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) if not isinstance(timeout, Timeout): timeout = Timeout.from_float(timeout) if retries is None: retries = Retry.DEFAULT self.timeout = timeout self.retries = retries self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize) self.block = block self.proxy = _proxy self.proxy_headers = _proxy_headers or {} self.proxy_config = _proxy_config # Fill the queue up so that doing get() on it will block properly for _ in range(maxsize): self.pool.put(None) # These are mostly for testing and debugging purposes. self.num_connections = 0 self.num_requests = 0 self.conn_kw = conn_kw if self.proxy: # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. # We cannot know if the user has added default socket options, so we cannot replace the # list. self.conn_kw.setdefault("socket_options", []) self.conn_kw["proxy"] = self.proxy self.conn_kw["proxy_config"] = self.proxy_config # Do not pass 'self' as callback to 'finalize'. # Then the 'finalize' would keep an endless living (leak) to self. # By just passing a reference to the pool allows the garbage collector # to free self if nobody else has a reference to it. pool = self.pool # Close all the HTTPConnections in the pool before the # HTTPConnectionPool object is garbage collected. 
weakref.finalize(self, _close_pool_connections, pool) def _new_conn(self) -> BaseHTTPConnection: """ Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 log.debug( "Starting new HTTP connection (%d): %s:%s", self.num_connections, self.host, self.port or "80", ) conn = self.ConnectionCls( host=self.host, port=self.port, timeout=self.timeout.connect_timeout, **self.conn_kw, ) return conn def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: """ Get a connection. Will return a pooled connection if one is available. If no connections are available and :prop:`.block` is ``False``, then a fresh connection is returned. :param timeout: Seconds to wait before giving up and raising :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and :prop:`.block` is ``True``. """ conn = None if self.pool is None: raise ClosedPoolError(self, "Pool is closed.") try: conn = self.pool.get(block=self.block, timeout=timeout) except AttributeError: # self.pool is None raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: except queue.Empty: if self.block: raise EmptyPoolError( self, "Pool is empty and a new connection can't be opened due to blocking mode.", ) from None pass # Oh well, we'll create a new connection then # If this is a persistent connection, check if it got disconnected if conn and is_connection_dropped(conn): log.debug("Resetting dropped connection: %s", self.host) conn.close() return conn or self._new_conn() def _put_conn(self, conn: BaseHTTPConnection | None) -> None: """ Put a connection back into the pool. :param conn: Connection object for the current host and port as returned by :meth:`._new_conn` or :meth:`._get_conn`. If the pool is already full, the connection is closed and discarded because we exceeded maxsize. If connections are discarded frequently, then maxsize should be increased. If the pool is closed, then the connection will be closed and discarded. """ if self.pool is not None: try: self.pool.put(conn, block=False) return # Everything is dandy, done. except AttributeError: # self.pool is None. pass except queue.Full: # Connection never got put back into the pool, close it. if conn: conn.close() if self.block: # This should never happen if you got the conn from self._get_conn raise FullPoolError( self, "Pool reached maximum size and no more connections are allowed.", ) from None log.warning( "Connection pool is full, discarding connection: %s. Connection pool size: %s", self.host, self.pool.qsize(), ) # Connection never got put back into the pool, close it. if conn: conn.close() def _validate_conn(self, conn: BaseHTTPConnection) -> None: """ Called right before a request is made, after the socket is created. """ def _prepare_proxy(self, conn: BaseHTTPConnection) -> None: # Nothing to do for HTTP connections. pass def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: """Helper that always returns a :class:`urllib3.util.Timeout`""" if timeout is _DEFAULT_TIMEOUT: return self.timeout.clone() if isinstance(timeout, Timeout): return timeout.clone() else: # User passed us an int/float. This is for backwards compatibility, # can be removed later return Timeout.from_float(timeout) def _raise_timeout( self, err: BaseSSLError | OSError | SocketTimeout, url: str, timeout_value: _TYPE_TIMEOUT | None, ) -> None: """Is the error actually a timeout? Will raise a ReadTimeout or pass""" if isinstance(err, SocketTimeout): raise ReadTimeoutError( self, url, f"Read timed out. 
(read timeout={timeout_value})" ) from err # See the above comment about EAGAIN in Python 3. if hasattr(err, "errno") and err.errno in _blocking_errnos: raise ReadTimeoutError( self, url, f"Read timed out. (read timeout={timeout_value})" ) from err def _make_request( self, conn: BaseHTTPConnection, method: str, url: str, body: _TYPE_BODY | None = None, headers: typing.Mapping[str, str] | None = None, retries: Retry | None = None, timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, chunked: bool = False, response_conn: BaseHTTPConnection | None = None, preload_content: bool = True, decode_content: bool = True, enforce_content_length: bool = True, ) -> BaseHTTPResponse: """ Perform a request on a given urllib connection object taken from our pool. :param conn: a connection from one of our connection pools :param method: HTTP request method (such as GET, POST, PUT, etc.) :param url: The URL to perform the request on. :param body: Data to send in the request body, either :class:`str`, :class:`bytes`, an iterable of :class:`str`/:class:`bytes`, or a file-like object. :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. :param chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param response_conn: Set this to ``None`` if you will handle releasing the connection or set the connection to have the response release it. :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param enforce_content_length: Enforce content length checking. Body returned by server must match value of Content-Length header, if present. Otherwise, raise error. """ self.num_requests += 1 timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) try: # Trigger any extra validation we need to do. try: self._validate_conn(conn) except (SocketTimeout, BaseSSLError) as e: self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) raise # _validate_conn() starts the connection to an HTTPS proxy # so we need to wrap errors with 'ProxyError' here too. 
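        # (Illustrative: a TLS handshake attempted against a plain-HTTP proxy
        # fails during _validate_conn() and is re-wrapped as a ProxyError
        # below.)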
except ( OSError, NewConnectionError, TimeoutError, BaseSSLError, CertificateError, SSLError, ) as e: new_e: Exception = e if isinstance(e, (BaseSSLError, CertificateError)): new_e = SSLError(e) # If the connection didn't successfully connect to it's proxy # then there if isinstance( new_e, (OSError, NewConnectionError, TimeoutError, SSLError) ) and (conn and conn.proxy and not conn.has_connected_to_proxy): new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) raise new_e # conn.request() calls http.client.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. try: conn.request( method, url, body=body, headers=headers, chunked=chunked, preload_content=preload_content, decode_content=decode_content, enforce_content_length=enforce_content_length, ) # We are swallowing BrokenPipeError (errno.EPIPE) since the server is # legitimately able to close the connection after sending a valid response. # With this behaviour, the received response is still readable. except BrokenPipeError: pass except OSError as e: # MacOS/Linux # EPROTOTYPE is needed on macOS # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ if e.errno != errno.EPROTOTYPE: raise # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout if not conn.is_closed: # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching # the exception and assuming all BadStatusLine exceptions are read # timeouts, check for a zero timeout before making the request. if read_timeout == 0: raise ReadTimeoutError( self, url, f"Read timed out. (read timeout={read_timeout})" ) conn.timeout = read_timeout # Receive the response from the server try: response = conn.getresponse() except (BaseSSLError, OSError) as e: self._raise_timeout(err=e, url=url, timeout_value=read_timeout) raise # Set properties that are used by the pooling layer. response.retries = retries response._connection = response_conn # type: ignore[attr-defined] response._pool = self # type: ignore[attr-defined] log.debug( '%s://%s:%s "%s %s %s" %s %s', self.scheme, self.host, self.port, method, url, # HTTP version conn._http_vsn_str, # type: ignore[attr-defined] response.status, response.length_remaining, # type: ignore[attr-defined] ) return response def close(self) -> None: """ Close all pooled connections and disable the pool. """ if self.pool is None: return # Disable access to the pool old_pool, self.pool = self.pool, None # Close all the HTTPConnections in the pool. _close_pool_connections(old_pool) def is_same_host(self, url: str) -> bool: """ Check if the given ``url`` is a member of the same host as this connection pool. """ if url.startswith("/"): return True # TODO: Add optional support for socket.gethostbyname checking. 
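        # Illustrative comparisons for a pool bound to http://example.com:80
        # (hypothetical URLs):
        #   "http://example.com/a"     -> same host (default port 80 is implied)
        #   "http://example.com:8080"  -> different port, not the same host
        #   "https://example.com/"     -> different scheme, not the same host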
scheme, _, host, port, *_ = parse_url(url) scheme = scheme or "http" if host is not None: host = _normalize_host(host, scheme=scheme) # Use explicit default port for comparison when none is given if self.port and not port: port = port_by_scheme.get(scheme) elif not self.port and port == port_by_scheme.get(scheme): port = None return (scheme, host, port) == (self.scheme, self.host, self.port) def urlopen( # type: ignore[override] self, method: str, url: str, body: _TYPE_BODY | None = None, headers: typing.Mapping[str, str] | None = None, retries: Retry | bool | int | None = None, redirect: bool = True, assert_same_host: bool = True, timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, pool_timeout: int | None = None, release_conn: bool | None = None, chunked: bool = False, body_pos: _TYPE_BODY_POSITION | None = None, preload_content: bool = True, decode_content: bool = True, **response_kw: typing.Any, ) -> BaseHTTPResponse: """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all the raw details. .. note:: More commonly, it's appropriate to use a convenience method such as :meth:`request`. .. note:: `release_conn` will only behave as expected if `preload_content=False` because we want to make `preload_content=False` the default behaviour someday soon without breaking backwards compatibility. :param method: HTTP request method (such as GET, POST, PUT, etc.) :param url: The URL to perform the request on. :param body: Data to send in the request body, either :class:`str`, :class:`bytes`, an iterable of :class:`str`/:class:`bytes`, or a file-like object. :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, 303, 307, 308). Each redirect counts as a retry. Disabling retries will disable redirect, too. :param assert_same_host: If ``True``, will make sure that the host of the pool requests is consistent else will raise HostChangedError. When ``False``, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. :param pool_timeout: If set and the pool is set to block=True, then this method will block for ``pool_timeout`` seconds and raise EmptyPoolError if no connection is available within the time period. :param bool preload_content: If True, the response's body will be preloaded into memory. :param bool decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. 
:param release_conn: If False, then the urlopen call will not release the connection back into the pool once a response is received (but will release if you read the entire contents of the response such as when `preload_content=True`). This is useful if you're not preloading the response's content immediately. You will need to call ``r.release_conn()`` on the response ``r`` to return the connection back into the pool. If None, it takes the value of ``preload_content`` which defaults to ``True``. :param bool chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param int body_pos: Position to seek to in file-like body in the event of a retry or redirect. Typically this won't need to be set because urllib3 will auto-populate the value when needed. """ parsed_url = parse_url(url) destination_scheme = parsed_url.scheme if headers is None: headers = self.headers if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = preload_content # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) # Ensure that the URL we're connecting to is properly encoded if url.startswith("/"): url = to_str(_encode_target(url)) else: url = to_str(parsed_url.url) conn = None # Track whether `conn` needs to be released before # returning/raising/recursing. Update this variable if necessary, and # leave `release_conn` constant throughout the function. That way, if # the function recurses, the original value of `release_conn` will be # passed down into the recursive call, and its value will be respected. # # See issue #651 [1] for details. # # [1] release_this_conn = release_conn http_tunnel_required = connection_requires_http_tunnel( self.proxy, self.proxy_config, destination_scheme ) # Merge the proxy headers. Only done when not using HTTP CONNECT. We # have to copy the headers dict so we can safely change it without those # changes being reflected in anyone else's copy. if not http_tunnel_required: headers = headers.copy() # type: ignore[attr-defined] headers.update(self.proxy_headers) # type: ignore[union-attr] # Must keep the exception bound to a separate variable or else Python 3 # complains about UnboundLocalError. err = None # Keep track of whether we cleanly exited the except block. This # ensures we do proper cleanup in finally. clean_exit = False # Rewind body position, if needed. Record current position # for future rewinds in the event of a redirect/retry. body_pos = set_file_position(body, body_pos) try: # Request a connection from the queue. timeout_obj = self._get_timeout(timeout) conn = self._get_conn(timeout=pool_timeout) conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment] # Is this a closed/new connection that requires CONNECT tunnelling? if self.proxy is not None and http_tunnel_required and conn.is_closed: try: self._prepare_proxy(conn) except (BaseSSLError, OSError, SocketTimeout) as e: self._raise_timeout( err=e, url=self.proxy.url, timeout_value=conn.timeout ) raise # If we're going to release the connection in ``finally:``, then # the response doesn't need to know about the connection. Otherwise # it will also try to release it and we'll have a double-release # mess. 
response_conn = conn if not release_conn else None # Make the request on the HTTPConnection object response = self._make_request( conn, method, url, timeout=timeout_obj, body=body, headers=headers, chunked=chunked, retries=retries, response_conn=response_conn, preload_content=preload_content, decode_content=decode_content, **response_kw, ) # Everything went great! clean_exit = True except EmptyPoolError: # Didn't get a connection from the pool, no need to clean up clean_exit = True release_this_conn = False raise except ( TimeoutError, HTTPException, OSError, ProtocolError, BaseSSLError, SSLError, CertificateError, ProxyError, ) as e: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. clean_exit = False new_e: Exception = e if isinstance(e, (BaseSSLError, CertificateError)): new_e = SSLError(e) if isinstance( new_e, ( OSError, NewConnectionError, TimeoutError, SSLError, HTTPException, ), ) and (conn and conn.proxy and not conn.has_connected_to_proxy): new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) elif isinstance(new_e, (OSError, HTTPException)): new_e = ProtocolError("Connection aborted.", new_e) retries = retries.increment( method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] ) retries.sleep() # Keep track of the error for the retry warning. err = e finally: if not clean_exit: # We hit some kind of exception, handled or otherwise. We need # to throw the connection away unless explicitly told not to. # Close the connection, set the variable to None, and make sure # we put the None back in the pool to avoid leaking it. if conn: conn.close() conn = None release_this_conn = True if release_this_conn: # Put the connection back to be reused. If the connection is # expired then it will be None, which will get replaced with a # fresh connection during _get_conn. self._put_conn(conn) if not conn: # Try again log.warning( "Retrying (%r) after connection broken by '%r': %s", retries, err, url ) return self.urlopen( method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, chunked=chunked, body_pos=body_pos, preload_content=preload_content, decode_content=decode_content, **response_kw, ) # Handle redirect? redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: method = "GET" try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_redirect: response.drain_conn() raise return response response.drain_conn() retries.sleep_for_retry(response) log.debug("Redirecting %s -> %s", url, redirect_location) return self.urlopen( method, redirect_location, body, headers, retries=retries, redirect=redirect, assert_same_host=assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, chunked=chunked, body_pos=body_pos, preload_content=preload_content, decode_content=decode_content, **response_kw, ) # Check if we should retry the HTTP response. 
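        # (Illustrative: with retries=Retry(status=3, status_forcelist=[503]),
        # a 503 response is drained, any Retry-After header is honored while
        # sleeping, and the request is re-issued below.)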
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response


class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        ssl_version: int | str | None = None,
        ssl_minimum_version: ssl.TLSVersion | None = None,
        ssl_maximum_version: ssl.TLSVersion | None = None,
        assert_hostname: str | Literal[False] | None = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        **conn_kw: typing.Any,
    ) -> None:
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """Establishes a tunnel connection through HTTP CONNECT."""
        if self.proxy and self.proxy.scheme == "https":
            tunnel_scheme = "https"
        else:
            tunnel_scheme = "http"

        conn.set_tunnel(
            scheme=tunnel_scheme,
            host=self._tunnel_host,
            port=self.port,
            headers=self.proxy_headers,
        )
        conn.connect()

    def _new_conn(self) -> BaseHTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        actual_host: str = self.host
        actual_port = self.port
        if self.proxy is not None and self.proxy.host is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        return self.ConnectionCls(
            host=actual_host,
            port=actual_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            **self.conn_kw,
        )

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if conn.is_closed:
            conn.connect()

        if not conn.is_verified:
            warnings.warn(
                (
                    f"Unverified HTTPS request is being made to host '{conn.host}'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "#tls-warnings"
                ),
                InsecureRequestWarning,
            )


def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, _, host, port, *_ = parse_url(url)
    scheme = scheme or "http"
    port = port or port_by_scheme.get(scheme, 80)
    if scheme == "https":
        return HTTPSConnectionPool(host, port=port, **kw)  # type: ignore[arg-type]
    else:
        return HTTPConnectionPool(host, port=port, **kw)  # type: ignore[arg-type]


@typing.overload
def _normalize_host(host: None, scheme: str | None) -> None:
    ...


@typing.overload
def _normalize_host(host: str, scheme: str | None) -> str:
    ...


def _normalize_host(host: str | None, scheme: str | None) -> str | None:
    """
    Normalize hosts for comparisons and use with sockets.
    """
    host = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if host and host.startswith("[") and host.endswith("]"):
        host = host[1:-1]
    return host


def _url_from_pool(
    pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
) -> str:
    """Returns the URL from a given connection pool.
    This is mainly used for testing and logging."""
    return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url


def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None:
    """Drains a queue of connections and closes each one."""
    try:
        while True:
            conn = pool.get(block=False)
            if conn:
                conn.close()
    except queue.Empty:
        pass  # Done.
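# A minimal usage sketch of the pool classes defined above, assuming this
# package is importable as `urllib3` (the host and keyword values below are
# illustrative, not taken from the vendored sources). Uncommented, it should
# run as-is:
#
#     from urllib3.connectionpool import connection_from_url
#
#     pool = connection_from_url("https://example.com/", maxsize=2)  # -> HTTPSConnectionPool
#     response = pool.urlopen("GET", "/", retries=3)  # drives the urlopen() defined above
#     print(response.status)
#     pool.close()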
PKQZ contrib/__init__.py (empty)
[binary __pycache__/*.cpython-311.pyc entries omitted: compiled CPython 3.11 bytecode for contrib/__init__, contrib/pyopenssl, contrib/securetransport, contrib/socks, contrib/_securetransport/__init__, contrib/_securetransport/bindings, contrib/_securetransport/low_level]
PKQZ contrib/_securetransport/__init__.py (empty)
PKQZ contrib/_securetransport/bindings.py
# type: ignore

"""
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.

This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:

    Copyright (c) 2015-2016 Will Bond <will@wbond.net>

    Permission is hereby granted, free of charge, to any person obtaining a
    copy of this software and associated documentation files (the "Software"),
    to deal in the Software without restriction, including without limitation
    the rights to use, copy, modify, merge, publish, distribute, sublicense,
    and/or sell copies of the Software, and to permit persons to whom the
    Software is furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    DEALINGS IN THE SOFTWARE.
"""

from __future__ import annotations

import platform
from ctypes import (
    CDLL,
    CFUNCTYPE,
    POINTER,
    c_bool,
    c_byte,
    c_char_p,
    c_int32,
    c_long,
    c_size_t,
    c_uint32,
    c_ulong,
    c_void_p,
)
from ctypes.util import find_library

if platform.system() != "Darwin":
    raise ImportError("Only macOS is supported")

version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split(".")))
if version_info < (10, 8):
    raise OSError(
        f"Only OS X 10.8 and newer are supported, not {version_info[0]}.{version_info[1]}"
    )


def load_cdll(name: str, macos10_16_path: str) -> CDLL:
    """Loads a CDLL by name, falling back to known path on 10.16+"""
    try:
        # Big Sur is technically 11 but we use 10.16 due to the Big Sur
        # beta being labeled as 10.16.
        path: str | None
        if version_info >= (10, 16):
            path = macos10_16_path
        else:
            path = find_library(name)
        if not path:
            raise OSError  # Caught and reraised as 'ImportError'
        return CDLL(path, use_errno=True)
    except OSError:
        raise ImportError(f"The library {name} failed to load") from None


Security = load_cdll(
    "Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
    "CoreFoundation",
    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)

Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
CFData = c_void_p
CFString = c_void_p
CFArray = c_void_p
CFMutableArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong
CFTypeRef = POINTER(CFType)
CFAllocatorRef = c_void_p

OSStatus = c_int32

CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFMutableArrayRef = POINTER(CFMutableArray)
CFDictionaryRef = POINTER(CFDictionary)
CFArrayCallBacks = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p

SecCertificateRef = POINTER(c_void_p)
SecExternalFormat = c_uint32
SecExternalItemType = c_uint32
SecIdentityRef = POINTER(c_void_p)
SecItemImportExportFlags = c_uint32
SecItemImportExportKeyParameters = c_void_p
SecKeychainRef = POINTER(c_void_p)
SSLProtocol = c_uint32
SSLCipherSuite = c_uint32
SSLContextRef = POINTER(c_void_p)
SecTrustRef = POINTER(c_void_p)
SSLConnectionRef = c_uint32
SecTrustResultType = c_uint32
SecTrustOptionFlags = c_uint32
SSLProtocolSide = c_uint32
SSLConnectionType = c_uint32
SSLSessionOption = c_uint32

try:
    Security.SecItemImport.argtypes = [
        CFDataRef,
        CFStringRef,
        POINTER(SecExternalFormat),
        POINTER(SecExternalItemType),
        SecItemImportExportFlags,
        POINTER(SecItemImportExportKeyParameters),
        SecKeychainRef,
        POINTER(CFArrayRef),
    ]
    Security.SecItemImport.restype = OSStatus

    Security.SecCertificateGetTypeID.argtypes = []
    Security.SecCertificateGetTypeID.restype = CFTypeID

    Security.SecIdentityGetTypeID.argtypes = []
    Security.SecIdentityGetTypeID.restype = CFTypeID

    Security.SecKeyGetTypeID.argtypes = []
    Security.SecKeyGetTypeID.restype = CFTypeID

    Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
    Security.SecCertificateCreateWithData.restype = SecCertificateRef

    Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
    Security.SecCertificateCopyData.restype = CFDataRef

    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
    Security.SecCopyErrorMessageString.restype = CFStringRef

    Security.SecIdentityCreateWithCertificate.argtypes = [
        CFTypeRef,
        SecCertificateRef,
        POINTER(SecIdentityRef),
    ]
    Security.SecIdentityCreateWithCertificate.restype = OSStatus

    Security.SecKeychainCreate.argtypes = [
        c_char_p,
        c_uint32,
        c_void_p,
        Boolean,
        c_void_p,
        POINTER(SecKeychainRef),
    ]
    Security.SecKeychainCreate.restype = OSStatus

    Security.SecKeychainDelete.argtypes = [SecKeychainRef]
    Security.SecKeychainDelete.restype = OSStatus

    Security.SecPKCS12Import.argtypes = [
        CFDataRef,
        CFDictionaryRef,
        POINTER(CFArrayRef),
    ]
    Security.SecPKCS12Import.restype = OSStatus

    SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
    SSLWriteFunc = CFUNCTYPE(
        OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
    )

    Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
    Security.SSLSetIOFuncs.restype = OSStatus

    Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
    Security.SSLSetPeerID.restype = OSStatus

    Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
    Security.SSLSetCertificate.restype = OSStatus

    Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
    Security.SSLSetCertificateAuthorities.restype = OSStatus

    Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
    Security.SSLSetConnection.restype = OSStatus

    Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
    Security.SSLSetPeerDomainName.restype = OSStatus

    Security.SSLHandshake.argtypes = [SSLContextRef]
    Security.SSLHandshake.restype = OSStatus

    Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
    Security.SSLRead.restype = OSStatus

    Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
    Security.SSLWrite.restype = OSStatus

    Security.SSLClose.argtypes = [SSLContextRef]
    Security.SSLClose.restype = OSStatus

    Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
    Security.SSLGetNumberSupportedCiphers.restype = OSStatus

    Security.SSLGetSupportedCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t),
    ]
    Security.SSLGetSupportedCiphers.restype = OSStatus

    Security.SSLSetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        c_size_t,
    ]
    Security.SSLSetEnabledCiphers.restype = OSStatus

    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
    Security.SSLGetNumberEnabledCiphers.restype = OSStatus

    Security.SSLGetEnabledCiphers.argtypes = [
        SSLContextRef,
        POINTER(SSLCipherSuite),
        POINTER(c_size_t),
    ]
    Security.SSLGetEnabledCiphers.restype = OSStatus

    Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
    Security.SSLGetNegotiatedCipher.restype = OSStatus

    Security.SSLGetNegotiatedProtocolVersion.argtypes = [
        SSLContextRef,
        POINTER(SSLProtocol),
    ]
    Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus

    Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
    Security.SSLCopyPeerTrust.restype = OSStatus

    Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
    Security.SecTrustSetAnchorCertificates.restype = OSStatus

    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
    Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus

    Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
    Security.SecTrustEvaluate.restype = OSStatus

    Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
    Security.SecTrustGetCertificateCount.restype = CFIndex

    Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
    Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef

    Security.SSLCreateContext.argtypes = [
        CFAllocatorRef,
        SSLProtocolSide,
        SSLConnectionType,
    ]
    Security.SSLCreateContext.restype = SSLContextRef
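    # A hedged micro-example of what the `argtypes`/`restype` declarations
    # above buy us: ctypes then marshals the arguments and converts return
    # values, so an (illustrative) call like the one below yields a plain
    # OSStatus int; `ctx` stands for some SSLContextRef obtained from
    # SSLCreateContext and is not defined in this module:
    #
    #     status = Security.SSLHandshake(ctx)
    #     if status != 0:
    #         ...  # map the OSStatus to an error, as the higher layers do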
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean] Security.SSLSetSessionOption.restype = OSStatus Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMin.restype = OSStatus Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol] Security.SSLSetProtocolVersionMax.restype = OSStatus try: Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef] Security.SSLSetALPNProtocols.restype = OSStatus except AttributeError: # Supported only in 10.12+ pass Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SSLReadFunc = SSLReadFunc Security.SSLWriteFunc = SSLWriteFunc Security.SSLContextRef = SSLContextRef Security.SSLProtocol = SSLProtocol Security.SSLCipherSuite = SSLCipherSuite Security.SecIdentityRef = SecIdentityRef Security.SecKeychainRef = SecKeychainRef Security.SecTrustRef = SecTrustRef Security.SecTrustResultType = SecTrustResultType Security.SecExternalFormat = SecExternalFormat Security.OSStatus = OSStatus Security.kSecImportExportPassphrase = CFStringRef.in_dll( Security, "kSecImportExportPassphrase" ) Security.kSecImportItemIdentity = CFStringRef.in_dll( Security, "kSecImportItemIdentity" ) # CoreFoundation time! CoreFoundation.CFRetain.argtypes = [CFTypeRef] CoreFoundation.CFRetain.restype = CFTypeRef CoreFoundation.CFRelease.argtypes = [CFTypeRef] CoreFoundation.CFRelease.restype = None CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] CoreFoundation.CFGetTypeID.restype = CFTypeID CoreFoundation.CFStringCreateWithCString.argtypes = [ CFAllocatorRef, c_char_p, CFStringEncoding, ] CoreFoundation.CFStringCreateWithCString.restype = CFStringRef CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] CoreFoundation.CFStringGetCStringPtr.restype = c_char_p CoreFoundation.CFStringGetCString.argtypes = [ CFStringRef, c_char_p, CFIndex, CFStringEncoding, ] CoreFoundation.CFStringGetCString.restype = c_bool CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] CoreFoundation.CFDataCreate.restype = CFDataRef CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] CoreFoundation.CFDataGetLength.restype = CFIndex CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] CoreFoundation.CFDataGetBytePtr.restype = c_void_p CoreFoundation.CFDictionaryCreate.argtypes = [ CFAllocatorRef, POINTER(CFTypeRef), POINTER(CFTypeRef), CFIndex, CFDictionaryKeyCallBacks, CFDictionaryValueCallBacks, ] CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef CoreFoundation.CFArrayCreate.argtypes = [ CFAllocatorRef, POINTER(CFTypeRef), CFIndex, CFArrayCallBacks, ] CoreFoundation.CFArrayCreate.restype = CFArrayRef CoreFoundation.CFArrayCreateMutable.argtypes = [ CFAllocatorRef, CFIndex, CFArrayCallBacks, ] CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] CoreFoundation.CFArrayAppendValue.restype = None CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] CoreFoundation.CFArrayGetCount.restype = CFIndex CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( CoreFoundation, "kCFAllocatorDefault" ) CoreFoundation.kCFTypeArrayCallBacks = 
c_void_p.in_dll( CoreFoundation, "kCFTypeArrayCallBacks" ) CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( CoreFoundation, "kCFTypeDictionaryKeyCallBacks" ) CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( CoreFoundation, "kCFTypeDictionaryValueCallBacks" ) CoreFoundation.CFTypeRef = CFTypeRef CoreFoundation.CFArrayRef = CFArrayRef CoreFoundation.CFStringRef = CFStringRef CoreFoundation.CFDictionaryRef = CFDictionaryRef except AttributeError: raise ImportError("Error initializing ctypes") from None class CFConst: """ A class object that acts as essentially a namespace for CoreFoundation constants. """ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
# ---- contrib/_securetransport/low_level.py ----
""" Low-level helpers for the SecureTransport bindings. These are Python functions that are not directly related to the high-level APIs but are necessary to get them to work. They include a whole bunch of low-level CoreFoundation messing about and memory management. The concerns in this module are almost entirely about trying to avoid memory leaks and providing appropriate and useful assistance to the higher-level code. """ from __future__ import annotations import base64 import ctypes import itertools import os import re import ssl import struct import tempfile import typing from .bindings import ( # type: ignore[attr-defined] CFArray, CFConst, CFData, CFDictionary, CFMutableArray, CFString, CFTypeRef, CoreFoundation, SecKeychainRef, Security, ) # This regular expression is used to grab PEM data out of a PEM bundle. _PEM_CERTS_RE = re.compile( b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL ) def _cf_data_from_bytes(bytestring: bytes) -> CFData: """ Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller. """ return CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) ) def _cf_dictionary_from_tuples( tuples: list[tuple[typing.Any, typing.Any]] ) -> CFDictionary: """ Given a list of Python tuples, create an associated CFDictionary. """ dictionary_size = len(tuples) # We need to get the dictionary keys and values out in the same order. keys = (t[0] for t in tuples) values = (t[1] for t in tuples) cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) return CoreFoundation.CFDictionaryCreate( CoreFoundation.kCFAllocatorDefault, cf_keys, cf_values, dictionary_size, CoreFoundation.kCFTypeDictionaryKeyCallBacks, CoreFoundation.kCFTypeDictionaryValueCallBacks, ) def _cfstr(py_bstr: bytes) -> CFString: """ Given a Python binary data, create a CFString. The string must be CFReleased by the caller. """ c_str = ctypes.c_char_p(py_bstr) cf_str = CoreFoundation.CFStringCreateWithCString( CoreFoundation.kCFAllocatorDefault, c_str, CFConst.kCFStringEncodingUTF8, ) return cf_str def _create_cfstring_array(lst: list[bytes]) -> CFMutableArray: """ Given a list of Python binary data, create an associated CFMutableArray. The array must be CFReleased by the caller. Raises an ssl.SSLError on failure.
""" cf_arr = None try: cf_arr = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) if not cf_arr: raise MemoryError("Unable to allocate memory!") for item in lst: cf_str = _cfstr(item) if not cf_str: raise MemoryError("Unable to allocate memory!") try: CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) finally: CoreFoundation.CFRelease(cf_str) except BaseException as e: if cf_arr: CoreFoundation.CFRelease(cf_arr) raise ssl.SSLError(f"Unable to allocate array: {e}") from None return cf_arr def _cf_string_to_unicode(value: CFString) -> str | None: """ Creates a Unicode string from a CFString object. Used entirely for error reporting. Yes, it annoys me quite a lot that this function is this complex. """ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) string = CoreFoundation.CFStringGetCStringPtr( value_as_void_p, CFConst.kCFStringEncodingUTF8 ) if string is None: buffer = ctypes.create_string_buffer(1024) result = CoreFoundation.CFStringGetCString( value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 ) if not result: raise OSError("Error copying C string from CFStringRef") string = buffer.value if string is not None: string = string.decode("utf-8") return string # type: ignore[no-any-return] def _assert_no_error( error: int, exception_class: type[BaseException] | None = None ) -> None: """ Checks the return code and throws an exception if there is an error to report """ if error == 0: return cf_error_string = Security.SecCopyErrorMessageString(error, None) output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) if output is None or output == "": output = f"OSStatus {error}" if exception_class is None: exception_class = ssl.SSLError raise exception_class(output) def _cert_array_from_pem(pem_bundle: bytes) -> CFArray: """ Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. """ # Normalize the PEM bundle's line endings. pem_bundle = pem_bundle.replace(b"\r\n", b"\n") der_certs = [ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) if not cert_array: raise ssl.SSLError("Unable to allocate memory!") try: for der_bytes in der_certs: certdata = _cf_data_from_bytes(der_bytes) if not certdata: raise ssl.SSLError("Unable to allocate memory!") cert = Security.SecCertificateCreateWithData( CoreFoundation.kCFAllocatorDefault, certdata ) CoreFoundation.CFRelease(certdata) if not cert: raise ssl.SSLError("Unable to build cert object!") CoreFoundation.CFArrayAppendValue(cert_array, cert) CoreFoundation.CFRelease(cert) except Exception: # We need to free the array before the exception bubbles further. # We only want to do that if an error occurs: otherwise, the caller # should free. CoreFoundation.CFRelease(cert_array) raise return cert_array def _is_cert(item: CFTypeRef) -> bool: """ Returns True if a given CFTypeRef is a certificate. """ expected = Security.SecCertificateGetTypeID() return CoreFoundation.CFGetTypeID(item) == expected # type: ignore[no-any-return] def _is_identity(item: CFTypeRef) -> bool: """ Returns True if a given CFTypeRef is an identity. 
""" expected = Security.SecIdentityGetTypeID() return CoreFoundation.CFGetTypeID(item) == expected # type: ignore[no-any-return] def _temporary_keychain() -> tuple[SecKeychainRef, str]: """ This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it. """ # Unfortunately, SecKeychainCreate requires a path to a keychain. This # means we cannot use mkstemp to use a generic temporary file. Instead, # we're going to create a temporary directory and a filename to use there. # This filename will be 8 random bytes expanded into base64. We also need # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. random_bytes = os.urandom(40) filename = base64.b16encode(random_bytes[:8]).decode("utf-8") password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") # We now want to create the keychain itself. keychain = Security.SecKeychainRef() status = Security.SecKeychainCreate( keychain_path, len(password), password, False, None, ctypes.byref(keychain) ) _assert_no_error(status) # Having created the keychain, we want to pass it off to the caller. return keychain, tempdirectory def _load_items_from_file( keychain: SecKeychainRef, path: str ) -> tuple[list[CFTypeRef], list[CFTypeRef]]: """ Given a single file, loads all the trust objects from it into arrays and the keychain. Returns a tuple of lists: the first list is a list of identities, the second a list of certs. """ certificates = [] identities = [] result_array = None with open(path, "rb") as f: raw_filedata = f.read() try: filedata = CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) ) result_array = CoreFoundation.CFArrayRef() result = Security.SecItemImport( filedata, # cert data None, # Filename, leaving it out for now None, # What the type of the file is, we don't care None, # what's in the file, we don't care 0, # import flags None, # key params, can include passphrase in the future keychain, # The keychain to insert into ctypes.byref(result_array), # Results ) _assert_no_error(result) # A CFArray is not very useful to us as an intermediary # representation, so we are going to extract the objects we want # and then free the array. We don't need to keep hold of keys: the # keychain already has them! result_count = CoreFoundation.CFArrayGetCount(result_array) for index in range(result_count): item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) item = ctypes.cast(item, CoreFoundation.CFTypeRef) if _is_cert(item): CoreFoundation.CFRetain(item) certificates.append(item) elif _is_identity(item): CoreFoundation.CFRetain(item) identities.append(item) finally: if result_array: CoreFoundation.CFRelease(result_array) CoreFoundation.CFRelease(filedata) return (identities, certificates) def _load_client_cert_chain(keychain: SecKeychainRef, *paths: str | None) -> CFArray: """ Load certificates and maybe keys from a number of files. Has the end goal of returning a CFArray containing one SecIdentityRef, and then zero or more SecCertificateRef objects, suitable for use as a client certificate trust chain. 
""" # Ok, the strategy. # # This relies on knowing that macOS will not give you a SecIdentityRef # unless you have imported a key into a keychain. This is a somewhat # artificial limitation of macOS (for example, it doesn't necessarily # affect iOS), but there is nothing inside Security.framework that lets you # get a SecIdentityRef without having a key in a keychain. # # So the policy here is we take all the files and iterate them in order. # Each one will use SecItemImport to have one or more objects loaded from # it. We will also point at a keychain that macOS can use to work with the # private key. # # Once we have all the objects, we'll check what we actually have. If we # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, # we'll take the first certificate (which we assume to be our leaf) and # ask the keychain to give us a SecIdentityRef with that cert's associated # key. # # We'll then return a CFArray containing the trust chain: one # SecIdentityRef and then zero-or-more SecCertificateRef objects. The # responsibility for freeing this CFArray will be with the caller. This # CFArray must remain alive for the entire connection, so in practice it # will be stored with a single SSLSocket, along with the reference to the # keychain. certificates = [] identities = [] # Filter out bad paths. filtered_paths = (path for path in paths if path) try: for file_path in filtered_paths: new_identities, new_certs = _load_items_from_file(keychain, file_path) identities.extend(new_identities) certificates.extend(new_certs) # Ok, we have everything. The question is: do we have an identity? If # not, we want to grab one from the first cert we have. if not identities: new_identity = Security.SecIdentityRef() status = Security.SecIdentityCreateWithCertificate( keychain, certificates[0], ctypes.byref(new_identity) ) _assert_no_error(status) identities.append(new_identity) # We now want to release the original certificate, as we no longer # need it. CoreFoundation.CFRelease(certificates.pop(0)) # We now need to build a new CFArray that holds the trust chain. trust_chain = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) for item in itertools.chain(identities, certificates): # ArrayAppendValue does a CFRetain on the item. That's fine, # because the finally block will release our other refs to them. CoreFoundation.CFArrayAppendValue(trust_chain, item) return trust_chain finally: for obj in itertools.chain(identities, certificates): CoreFoundation.CFRelease(obj) TLS_PROTOCOL_VERSIONS = { "SSLv2": (0, 2), "SSLv3": (3, 0), "TLSv1": (3, 1), "TLSv1.1": (3, 2), "TLSv1.2": (3, 3), } def _build_tls_unknown_ca_alert(version: str) -> bytes: """ Builds a TLS alert record for an unknown CA. """ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] severity_fatal = 0x02 description_unknown_ca = 0x30 msg = struct.pack(">BB", severity_fatal, description_unknown_ca) msg_len = len(msg) record_type_alert = 0x15 record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg return record class SecurityConst: """ A class object that acts as essentially a namespace for Security constants. 
""" kSSLSessionOptionBreakOnServerAuth = 0 kSSLProtocol2 = 1 kSSLProtocol3 = 2 kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 # SecureTransport does not support TLS 1.3 even if there's a constant for it kTLSProtocol13 = 10 kTLSProtocolMaxSupported = 999 kSSLClientSide = 1 kSSLStreamType = 0 kSecFormatPEMSequence = 10 kSecTrustResultInvalid = 0 kSecTrustResultProceed = 1 # This gap is present on purpose: this was kSecTrustResultConfirm, which # is deprecated. kSecTrustResultDeny = 3 kSecTrustResultUnspecified = 4 kSecTrustResultRecoverableTrustFailure = 5 kSecTrustResultFatalTrustFailure = 6 kSecTrustResultOtherError = 7 errSSLProtocol = -9800 errSSLWouldBlock = -9803 errSSLClosedGraceful = -9805 errSSLClosedNoNotify = -9816 errSSLClosedAbort = -9806 errSSLXCertChainInvalid = -9807 errSSLCrypto = -9809 errSSLInternal = -9810 errSSLCertExpired = -9814 errSSLCertNotYetValid = -9815 errSSLUnknownRootCert = -9812 errSSLNoRootCert = -9813 errSSLHostNameMismatch = -9843 errSSLPeerHandshakeFail = -9824 errSSLPeerUserCancelled = -9839 errSSLWeakPeerEphemeralDHKey = -9850 errSSLServerAuthCompleted = -9841 errSSLRecordOverflow = -9847 errSecVerifyFailed = -67808 errSecNoTrustSettings = -25263 errSecItemNotFound = -25300 errSecInvalidTrustSettings = -25262 PKQZn\KKcontrib/pyopenssl.pynu[""" Module for using pyOpenSSL as a TLS backend. This module was relevant before the standard library ``ssl`` module supported SNI, but now that we've dropped support for Python 2.7 all relevant Python versions support SNI so **this module is no longer recommended**. This needs the following packages installed: * `pyOpenSSL`_ (tested with 16.0.0) * `cryptography`_ (minimum 1.3.4, from pyopenssl) * `idna`_ (minimum 2.0, from cryptography) However, pyOpenSSL depends on cryptography, which depends on idna, so while we use all three directly here we end up having relatively few packages required. You can install them with the following command: .. code-block:: bash $ python -m pip install pyopenssl cryptography idna To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, like this: .. code-block:: python try: import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() except ImportError: pass .. _pyopenssl: https://www.pyopenssl.org .. _cryptography: https://cryptography.io .. _idna: https://github.com/kjd/idna """ from __future__ import annotations import OpenSSL.SSL # type: ignore[import] from cryptography import x509 try: from cryptography.x509 import UnsupportedExtension # type: ignore[attr-defined] except ImportError: # UnsupportedExtension is gone in cryptography >= 2.1.0 class UnsupportedExtension(Exception): # type: ignore[no-redef] pass import logging import ssl import typing import warnings from io import BytesIO from socket import socket as socket_cls from socket import timeout from .. import util warnings.warn( "'urllib3.contrib.pyopenssl' module is deprecated and will be removed " "in urllib3 v2.1.0. Read more in this issue: " "https://github.com/urllib3/urllib3/issues/2680", category=DeprecationWarning, stacklevel=2, ) if typing.TYPE_CHECKING: from OpenSSL.crypto import X509 # type: ignore[import] __all__ = ["inject_into_urllib3", "extract_from_urllib3"] # Map from urllib3 to PyOpenSSL compatible parameter-values. 
_openssl_versions = { util.ssl_.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, # type: ignore[attr-defined] util.ssl_.PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD, # type: ignore[attr-defined] ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } _openssl_to_stdlib_verify = {v: k for k, v in _stdlib_to_openssl_verify.items()} # The SSLvX values are the most likely to be missing in the future # but we check them all just to be sure. _OP_NO_SSLv2_OR_SSLv3: int = getattr(OpenSSL.SSL, "OP_NO_SSLv2", 0) | getattr( OpenSSL.SSL, "OP_NO_SSLv3", 0 ) _OP_NO_TLSv1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1", 0) _OP_NO_TLSv1_1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_1", 0) _OP_NO_TLSv1_2: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_2", 0) _OP_NO_TLSv1_3: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_3", 0) _openssl_to_ssl_minimum_version: dict[int, int] = { ssl.TLSVersion.MINIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3, ssl.TLSVersion.TLSv1: _OP_NO_SSLv2_OR_SSLv3, ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1, ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1, ssl.TLSVersion.TLSv1_3: ( _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 ), ssl.TLSVersion.MAXIMUM_SUPPORTED: ( _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 ), } _openssl_to_ssl_maximum_version: dict[int, int] = { ssl.TLSVersion.MINIMUM_SUPPORTED: ( _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3 ), ssl.TLSVersion.TLSv1: ( _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3 ), ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3, ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_3, ssl.TLSVersion.TLSv1_3: _OP_NO_SSLv2_OR_SSLv3, ssl.TLSVersion.MAXIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3, } # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 orig_util_SSLContext = util.ssl_.SSLContext log = logging.getLogger(__name__) def inject_into_urllib3() -> None: "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." _validate_dependencies_met() util.SSLContext = PyOpenSSLContext # type: ignore[assignment] util.ssl_.SSLContext = PyOpenSSLContext # type: ignore[assignment] util.IS_PYOPENSSL = True util.ssl_.IS_PYOPENSSL = True def extract_from_urllib3() -> None: "Undo monkey-patching by :func:`inject_into_urllib3`." util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.IS_PYOPENSSL = False util.ssl_.IS_PYOPENSSL = False def _validate_dependencies_met() -> None: """ Verifies that PyOpenSSL's package-level dependencies have been met. Throws `ImportError` if they are not met. """ # Method added in `cryptography==1.1`; not available in older versions from cryptography.x509.extensions import Extensions if getattr(Extensions, "get_extension_for_class", None) is None: raise ImportError( "'cryptography' module missing required functionality. " "Try upgrading to v1.3.4 or newer." ) # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. 
The _x509 # attribute is only present on those versions. from OpenSSL.crypto import X509 x509 = X509() if getattr(x509, "_x509", None) is None: raise ImportError( "'pyOpenSSL' module missing required functionality. " "Try upgrading to v0.14 or newer." ) def _dnsname_to_stdlib(name: str) -> str | None: """ Converts a dNSName SubjectAlternativeName field to the form used by the standard library on the given Python version. Cryptography produces a dNSName as a unicode string that was idna-decoded from ASCII bytes. We need to idna-encode that string to get it back, and then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). If the name cannot be idna-encoded then we return None signalling that the name given should be skipped. """ def idna_encode(name: str) -> bytes | None: """ Borrowed wholesale from the Python Cryptography Project. It turns out that we can't just safely call `idna.encode`: it can explode for wildcard names. This avoids that problem. """ import idna try: for prefix in ["*.", "."]: if name.startswith(prefix): name = name[len(prefix) :] return prefix.encode("ascii") + idna.encode(name) return idna.encode(name) except idna.core.IDNAError: return None # Don't send IPv6 addresses through the IDNA encoder. if ":" in name: return name encoded_name = idna_encode(name) if encoded_name is None: return None return encoded_name.decode("utf-8") def get_subj_alt_name(peer_cert: X509) -> list[tuple[str, str]]: """ Given an PyOpenSSL certificate, provides all the subject alternative names. """ cert = peer_cert.to_cryptography() # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) try: ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value except x509.ExtensionNotFound: # No such extension, return the empty list. return [] except ( x509.DuplicateExtension, UnsupportedExtension, x509.UnsupportedGeneralNameType, UnicodeError, ) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. log.warning( "A problem was encountered with the certificate that prevented " "urllib3 from finding the SubjectAlternativeName field. This can " "affect certificate validation. The error was %s", e, ) return [] # We want to return dNSName and iPAddress fields. We need to cast the IPs # back to strings because the match_hostname function wants them as # strings. # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 # decoded. This is pretty frustrating, but that's what the standard library # does with certificates, and so we need to attempt to do the same. # We also want to skip over names which cannot be idna encoded. 
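# A rough sketch of that round-trip (hypothetical inputs):
#
#     _dnsname_to_stdlib("münchen.example")  # -> "xn--mnchen-3ya.example"
#     _dnsname_to_stdlib("*.example.com")    # wildcard prefix survives intact
#     _dnsname_to_stdlib("2001:db8::1")      # contains ":", returned as-is
#
# Anything idna cannot encode comes back as None and is filtered out of the
# list comprehension below.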
names = [ ("DNS", name) for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) if name is not None ] names.extend( ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) ) return names class WrappedSocket: """API-compatibility wrapper for Python OpenSSL's Connection-class.""" def __init__( self, connection: OpenSSL.SSL.Connection, socket: socket_cls, suppress_ragged_eofs: bool = True, ) -> None: self.connection = connection self.socket = socket self.suppress_ragged_eofs = suppress_ragged_eofs self._io_refs = 0 self._closed = False def fileno(self) -> int: return self.socket.fileno() # Copy-pasted from Python 3.5 source code def _decref_socketios(self) -> None: if self._io_refs > 0: self._io_refs -= 1 if self._closed: self.close() def recv(self, *args: typing.Any, **kwargs: typing.Any) -> bytes: try: data = self.connection.recv(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): return b"" else: raise OSError(e.args[0], str(e)) from e except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return b"" else: raise except OpenSSL.SSL.WantReadError as e: if not util.wait_for_read(self.socket, self.socket.gettimeout()): raise timeout("The read operation timed out") from e else: return self.recv(*args, **kwargs) # TLS 1.3 post-handshake authentication except OpenSSL.SSL.Error as e: raise ssl.SSLError(f"read error: {e!r}") from e else: return data # type: ignore[no-any-return] def recv_into(self, *args: typing.Any, **kwargs: typing.Any) -> int: try: return self.connection.recv_into(*args, **kwargs) # type: ignore[no-any-return] except OpenSSL.SSL.SysCallError as e: if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): return 0 else: raise OSError(e.args[0], str(e)) from e except OpenSSL.SSL.ZeroReturnError: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return 0 else: raise except OpenSSL.SSL.WantReadError as e: if not util.wait_for_read(self.socket, self.socket.gettimeout()): raise timeout("The read operation timed out") from e else: return self.recv_into(*args, **kwargs) # TLS 1.3 post-handshake authentication except OpenSSL.SSL.Error as e: raise ssl.SSLError(f"read error: {e!r}") from e def settimeout(self, timeout: float) -> None: return self.socket.settimeout(timeout) def _send_until_done(self, data: bytes) -> int: while True: try: return self.connection.send(data) # type: ignore[no-any-return] except OpenSSL.SSL.WantWriteError as e: if not util.wait_for_write(self.socket, self.socket.gettimeout()): raise timeout() from e continue except OpenSSL.SSL.SysCallError as e: raise OSError(e.args[0], str(e)) from e def sendall(self, data: bytes) -> None: total_sent = 0 while total_sent < len(data): sent = self._send_until_done( data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] ) total_sent += sent def shutdown(self) -> None: # FIXME rethrow compatible exceptions should we ever use this self.connection.shutdown() def close(self) -> None: self._closed = True if self._io_refs <= 0: self._real_close() def _real_close(self) -> None: try: return self.connection.close() # type: ignore[no-any-return] except OpenSSL.SSL.Error: return def getpeercert( self, binary_form: bool = False ) -> dict[str, list[typing.Any]] | None: x509 = self.connection.get_peer_certificate() if not x509: return x509 # type: ignore[no-any-return] if binary_form: return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) # 
type: ignore[no-any-return] return { "subject": ((("commonName", x509.get_subject().CN),),), # type: ignore[dict-item] "subjectAltName": get_subj_alt_name(x509), } def version(self) -> str: return self.connection.get_protocol_version_name() # type: ignore[no-any-return] WrappedSocket.makefile = socket_cls.makefile # type: ignore[attr-defined] class PyOpenSSLContext: """ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible for translating the interface of the standard library ``SSLContext`` object to calls into PyOpenSSL. """ def __init__(self, protocol: int) -> None: self.protocol = _openssl_versions[protocol] self._ctx = OpenSSL.SSL.Context(self.protocol) self._options = 0 self.check_hostname = False self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED @property def options(self) -> int: return self._options @options.setter def options(self, value: int) -> None: self._options = value self._set_ctx_options() @property def verify_mode(self) -> int: return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] @verify_mode.setter def verify_mode(self, value: ssl.VerifyMode) -> None: self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) def set_default_verify_paths(self) -> None: self._ctx.set_default_verify_paths() def set_ciphers(self, ciphers: bytes | str) -> None: if isinstance(ciphers, str): ciphers = ciphers.encode("utf-8") self._ctx.set_cipher_list(ciphers) def load_verify_locations( self, cafile: str | None = None, capath: str | None = None, cadata: bytes | None = None, ) -> None: if cafile is not None: cafile = cafile.encode("utf-8") # type: ignore[assignment] if capath is not None: capath = capath.encode("utf-8") # type: ignore[assignment] try: self._ctx.load_verify_locations(cafile, capath) if cadata is not None: self._ctx.load_verify_locations(BytesIO(cadata)) except OpenSSL.SSL.Error as e: raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e def load_cert_chain( self, certfile: str, keyfile: str | None = None, password: str | None = None, ) -> None: try: self._ctx.use_certificate_chain_file(certfile) if password is not None: if not isinstance(password, bytes): password = password.encode("utf-8") # type: ignore[assignment] self._ctx.set_passwd_cb(lambda *_: password) self._ctx.use_privatekey_file(keyfile or certfile) except OpenSSL.SSL.Error as e: raise ssl.SSLError(f"Unable to load certificate chain: {e!r}") from e def set_alpn_protocols(self, protocols: list[bytes | str]) -> None: protocols = [util.util.to_bytes(p, "ascii") for p in protocols] return self._ctx.set_alpn_protos(protocols) # type: ignore[no-any-return] def wrap_socket( self, sock: socket_cls, server_side: bool = False, do_handshake_on_connect: bool = True, suppress_ragged_eofs: bool = True, server_hostname: bytes | str | None = None, ) -> WrappedSocket: cnx = OpenSSL.SSL.Connection(self._ctx, sock) # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3 if server_hostname and not util.ssl_.is_ipaddress(server_hostname): if isinstance(server_hostname, str): server_hostname = server_hostname.encode("utf-8") cnx.set_tlsext_host_name(server_hostname) cnx.set_connect_state() while True: try: cnx.do_handshake() except OpenSSL.SSL.WantReadError as e: if not util.wait_for_read(sock, sock.gettimeout()): raise timeout("select timed out") from e continue except OpenSSL.SSL.Error as e: raise ssl.SSLError(f"bad handshake: {e!r}") from e break return WrappedSocket(cnx, sock) def 
_set_ctx_options(self) -> None: self._ctx.set_options( self._options | _openssl_to_ssl_minimum_version[self._minimum_version] | _openssl_to_ssl_maximum_version[self._maximum_version] ) @property def minimum_version(self) -> int: return self._minimum_version @minimum_version.setter def minimum_version(self, minimum_version: int) -> None: self._minimum_version = minimum_version self._set_ctx_options() @property def maximum_version(self) -> int: return self._maximum_version @maximum_version.setter def maximum_version(self, maximum_version: int) -> None: self._maximum_version = maximum_version self._set_ctx_options() def _verify_callback( cnx: OpenSSL.SSL.Connection, x509: X509, err_no: int, err_depth: int, return_code: int, ) -> bool: return err_no == 0
# ---- contrib/securetransport.py ----
""" SecureTransport support for urllib3 via ctypes. This makes platform-native TLS available to urllib3 users on macOS without the use of a compiler. This is an important feature because the Python Package Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL that ships with macOS is not capable of doing TLSv1.2. The only way to resolve this is to give macOS users an alternative solution to the problem, and that solution is to use SecureTransport. We use ctypes here because this solution must not require a compiler. That's because pip is not allowed to require a compiler either. This is not intended to be a seriously long-term solution to this problem. The hope is that PEP 543 will eventually solve this issue for us, at which point we can retire this contrib module. But in the short term, we need to solve the impending tire fire that is Python on Mac without this kind of contrib module. So...here we are. To use this module, simply import and inject it:: import urllib3.contrib.securetransport urllib3.contrib.securetransport.inject_into_urllib3() Happy TLSing! This code is a bastardised version of the code found in Will Bond's oscrypto library. An enormous debt is owed to him for blazing this trail for us. For that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: .. code-block:: Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from __future__ import annotations import contextlib import ctypes import errno import os.path import shutil import socket import ssl import struct import threading import typing import warnings import weakref from socket import socket as socket_cls from ..
import util from ._securetransport.bindings import ( # type: ignore[attr-defined] CoreFoundation, Security, ) from ._securetransport.low_level import ( SecurityConst, _assert_no_error, _build_tls_unknown_ca_alert, _cert_array_from_pem, _create_cfstring_array, _load_client_cert_chain, _temporary_keychain, ) warnings.warn( "'urllib3.contrib.securetransport' module is deprecated and will be removed " "in urllib3 v2.1.0. Read more in this issue: " "https://github.com/urllib3/urllib3/issues/2681", category=DeprecationWarning, stacklevel=2, ) if typing.TYPE_CHECKING: from typing_extensions import Literal __all__ = ["inject_into_urllib3", "extract_from_urllib3"] orig_util_SSLContext = util.ssl_.SSLContext # This dictionary is used by the read callback to obtain a handle to the # calling wrapped socket. This is a pretty silly approach, but for now it'll # do. I feel like I should be able to smuggle a handle to the wrapped socket # directly in the SSLConnectionRef, but for now this approach will work I # guess. # # We need to lock around this structure for inserts, but we don't do it for # reads/writes in the callbacks. The reasoning here goes as follows: # # 1. It is not possible to call into the callbacks before the dictionary is # populated, so once in the callback the id must be in the dictionary. # 2. The callbacks don't mutate the dictionary, they only read from it, and # so cannot conflict with any of the insertions. # # This is good: if we had to lock in the callbacks we'd drastically slow down # the performance of this code. _connection_refs: weakref.WeakValueDictionary[ int, WrappedSocket ] = weakref.WeakValueDictionary() _connection_ref_lock = threading.Lock() # Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over # for no better reason than we need *a* limit, and this one is right there. SSL_WRITE_BLOCKSIZE = 16384 # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. # TLSv1 to 1.2 are supported on macOS 10.8+ _protocol_to_min_max = { util.ssl_.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), # type: ignore[attr-defined] util.ssl_.PROTOCOL_TLS_CLIENT: ( # type: ignore[attr-defined] SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12, ), } if hasattr(ssl, "PROTOCOL_SSLv2"): _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2, ) if hasattr(ssl, "PROTOCOL_SSLv3"): _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3, ) if hasattr(ssl, "PROTOCOL_TLSv1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1, ) if hasattr(ssl, "PROTOCOL_TLSv1_1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11, ) if hasattr(ssl, "PROTOCOL_TLSv1_2"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12, ) _tls_version_to_st: dict[int, int] = { ssl.TLSVersion.MINIMUM_SUPPORTED: SecurityConst.kTLSProtocol1, ssl.TLSVersion.TLSv1: SecurityConst.kTLSProtocol1, ssl.TLSVersion.TLSv1_1: SecurityConst.kTLSProtocol11, ssl.TLSVersion.TLSv1_2: SecurityConst.kTLSProtocol12, ssl.TLSVersion.MAXIMUM_SUPPORTED: SecurityConst.kTLSProtocol12, } def inject_into_urllib3() -> None: """ Monkey-patch urllib3 with SecureTransport-backed SSL-support. 
""" util.SSLContext = SecureTransportContext # type: ignore[assignment] util.ssl_.SSLContext = SecureTransportContext # type: ignore[assignment] util.IS_SECURETRANSPORT = True util.ssl_.IS_SECURETRANSPORT = True def extract_from_urllib3() -> None: """ Undo monkey-patching by :func:`inject_into_urllib3`. """ util.SSLContext = orig_util_SSLContext util.ssl_.SSLContext = orig_util_SSLContext util.IS_SECURETRANSPORT = False util.ssl_.IS_SECURETRANSPORT = False def _read_callback( connection_id: int, data_buffer: int, data_length_pointer: bytearray ) -> int: """ SecureTransport read callback. This is called by ST to request that data be returned from the socket. """ wrapped_socket = None try: wrapped_socket = _connection_refs.get(connection_id) if wrapped_socket is None: return SecurityConst.errSSLInternal base_socket = wrapped_socket.socket requested_length = data_length_pointer[0] timeout = wrapped_socket.gettimeout() error = None read_count = 0 try: while read_count < requested_length: if timeout is None or timeout >= 0: if not util.wait_for_read(base_socket, timeout): raise OSError(errno.EAGAIN, "timed out") remaining = requested_length - read_count buffer = (ctypes.c_char * remaining).from_address( data_buffer + read_count ) chunk_size = base_socket.recv_into(buffer, remaining) read_count += chunk_size if not chunk_size: if not read_count: return SecurityConst.errSSLClosedGraceful break except OSError as e: error = e.errno if error is not None and error != errno.EAGAIN: data_length_pointer[0] = read_count if error == errno.ECONNRESET or error == errno.EPIPE: return SecurityConst.errSSLClosedAbort raise data_length_pointer[0] = read_count if read_count != requested_length: return SecurityConst.errSSLWouldBlock return 0 except Exception as e: if wrapped_socket is not None: wrapped_socket._exception = e return SecurityConst.errSSLInternal def _write_callback( connection_id: int, data_buffer: int, data_length_pointer: bytearray ) -> int: """ SecureTransport write callback. This is called by ST to request that data actually be sent on the network. """ wrapped_socket = None try: wrapped_socket = _connection_refs.get(connection_id) if wrapped_socket is None: return SecurityConst.errSSLInternal base_socket = wrapped_socket.socket bytes_to_write = data_length_pointer[0] data = ctypes.string_at(data_buffer, bytes_to_write) timeout = wrapped_socket.gettimeout() error = None sent = 0 try: while sent < bytes_to_write: if timeout is None or timeout >= 0: if not util.wait_for_write(base_socket, timeout): raise OSError(errno.EAGAIN, "timed out") chunk_sent = base_socket.send(data) sent += chunk_sent # This has some needless copying here, but I'm not sure there's # much value in optimising this data path. data = data[chunk_sent:] except OSError as e: error = e.errno if error is not None and error != errno.EAGAIN: data_length_pointer[0] = sent if error == errno.ECONNRESET or error == errno.EPIPE: return SecurityConst.errSSLClosedAbort raise data_length_pointer[0] = sent if sent != bytes_to_write: return SecurityConst.errSSLWouldBlock return 0 except Exception as e: if wrapped_socket is not None: wrapped_socket._exception = e return SecurityConst.errSSLInternal # We need to keep these two objects references alive: if they get GC'd while # in use then SecureTransport could attempt to call a function that is in freed # memory. That would be...uh...bad. Yeah, that's the word. Bad. 
_read_callback_pointer = Security.SSLReadFunc(_read_callback) _write_callback_pointer = Security.SSLWriteFunc(_write_callback) class WrappedSocket: """ API-compatibility wrapper for Python's OpenSSL wrapped socket object. """ def __init__(self, socket: socket_cls) -> None: self.socket = socket self.context = None self._io_refs = 0 self._closed = False self._real_closed = False self._exception: Exception | None = None self._keychain = None self._keychain_dir: str | None = None self._client_cert_chain = None # We save off the previously-configured timeout and then set it to # zero. This is done because we use select and friends to handle the # timeouts, but if we leave the timeout set on the lower socket then # Python will "kindly" call select on that socket again for us. Avoid # that by forcing the timeout to zero. self._timeout = self.socket.gettimeout() self.socket.settimeout(0) @contextlib.contextmanager def _raise_on_error(self) -> typing.Generator[None, None, None]: """ A context manager that can be used to wrap calls that do I/O from SecureTransport. If any of the I/O callbacks hit an exception, this context manager will correctly propagate the exception after the fact. This avoids silently swallowing those exceptions. It also correctly forces the socket closed. """ self._exception = None # We explicitly don't catch around this yield because in the unlikely # event that an exception was hit in the block we don't want to swallow # it. yield if self._exception is not None: exception, self._exception = self._exception, None self._real_close() raise exception def _set_alpn_protocols(self, protocols: list[bytes] | None) -> None: """ Sets up the ALPN protocols on the context. """ if not protocols: return protocols_arr = _create_cfstring_array(protocols) try: result = Security.SSLSetALPNProtocols(self.context, protocols_arr) _assert_no_error(result) finally: CoreFoundation.CFRelease(protocols_arr) def _custom_validate(self, verify: bool, trust_bundle: bytes | None) -> None: """ Called when we have set custom validation. We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. Raises an SSLError if the connection is not trusted. """ # If we disabled cert validation, just say: cool. if not verify or trust_bundle is None: return successes = ( SecurityConst.kSecTrustResultUnspecified, SecurityConst.kSecTrustResultProceed, ) try: trust_result = self._evaluate_trust(trust_bundle) if trust_result in successes: return reason = f"error code: {int(trust_result)}" exc = None except Exception as e: # Do not trust on error reason = f"exception: {e!r}" exc = e # SecureTransport does not send an alert nor shuts down the connection. rec = _build_tls_unknown_ca_alert(self.version()) self.socket.sendall(rec) # close the connection immediately # l_onoff = 1, activate linger # l_linger = 0, linger for 0 seoncds opts = struct.pack("ii", 1, 0) self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts) self._real_close() raise ssl.SSLError(f"certificate verify failed, {reason}") from exc def _evaluate_trust(self, trust_bundle: bytes) -> int: # We want data in memory, so load it up. if os.path.isfile(trust_bundle): with open(trust_bundle, "rb") as f: trust_bundle = f.read() cert_array = None trust = Security.SecTrustRef() try: # Get a CFArray that contains the certs we want. cert_array = _cert_array_from_pem(trust_bundle) # Ok, now the hard part. 
We want to get the SecTrustRef that ST has # created for this connection, shove our CAs into it, tell ST to # ignore everything else it knows, and then ask if it can build a # chain. This is a buuuunch of code. result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: raise ssl.SSLError("Failed to copy trust reference") result = Security.SecTrustSetAnchorCertificates(trust, cert_array) _assert_no_error(result) result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) _assert_no_error(result) trust_result = Security.SecTrustResultType() result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result)) _assert_no_error(result) finally: if trust: CoreFoundation.CFRelease(trust) if cert_array is not None: CoreFoundation.CFRelease(cert_array) return trust_result.value # type: ignore[no-any-return] def handshake( self, server_hostname: bytes | str | None, verify: bool, trust_bundle: bytes | None, min_version: int, max_version: int, client_cert: str | None, client_key: str | None, client_key_passphrase: typing.Any, alpn_protocols: list[bytes] | None, ) -> None: """ Actually performs the TLS handshake. This is run automatically by wrapped socket, and shouldn't be needed in user code. """ # First, we do the initial bits of connection setup. We need to create # a context, set its I/O funcs, and set the connection reference. self.context = Security.SSLCreateContext( None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType ) result = Security.SSLSetIOFuncs( self.context, _read_callback_pointer, _write_callback_pointer ) _assert_no_error(result) # Here we need to compute the handle to use. We do this by taking the # id of self modulo 2**31 - 1. If this is already in the dictionary, we # just keep incrementing by one until we find a free space. with _connection_ref_lock: handle = id(self) % 2147483647 while handle in _connection_refs: handle = (handle + 1) % 2147483647 _connection_refs[handle] = self result = Security.SSLSetConnection(self.context, handle) _assert_no_error(result) # If we have a server hostname, we should set that too. # RFC6066 Section 3 tells us not to use SNI when the host is an IP, but we have # to do it anyway to match server_hostname against the server certificate if server_hostname: if not isinstance(server_hostname, bytes): server_hostname = server_hostname.encode("utf-8") result = Security.SSLSetPeerDomainName( self.context, server_hostname, len(server_hostname) ) _assert_no_error(result) # Setup the ALPN protocols. self._set_alpn_protocols(alpn_protocols) # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) result = Security.SSLSetProtocolVersionMax(self.context, max_version) _assert_no_error(result) # If there's a trust DB, we need to use it. We do that by telling # SecureTransport to break on server auth. We also do that if we don't # want to validate the certs at all: we just won't actually do any # authing in that case. if not verify or trust_bundle is not None: result = Security.SSLSetSessionOption( self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True ) _assert_no_error(result) # If there's a client cert, we need to use it. 
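# In outline (helpers in _securetransport/low_level.py): create a throwaway
# keychain, import the cert/key files so macOS will mint a SecIdentityRef,
# then hand the resulting (identity, *intermediates) CFArray to
# SSLSetCertificate. Everything is torn down again in _real_close().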
if client_cert: self._keychain, self._keychain_dir = _temporary_keychain() self._client_cert_chain = _load_client_cert_chain( self._keychain, client_cert, client_key ) result = Security.SSLSetCertificate(self.context, self._client_cert_chain) _assert_no_error(result) while True: with self._raise_on_error(): result = Security.SSLHandshake(self.context) if result == SecurityConst.errSSLWouldBlock: raise socket.timeout("handshake timed out") elif result == SecurityConst.errSSLServerAuthCompleted: self._custom_validate(verify, trust_bundle) continue else: _assert_no_error(result) break def fileno(self) -> int: return self.socket.fileno() # Copy-pasted from Python 3.5 source code def _decref_socketios(self) -> None: if self._io_refs > 0: self._io_refs -= 1 if self._closed: self.close() def recv(self, bufsiz: int) -> bytes: buffer = ctypes.create_string_buffer(bufsiz) bytes_read = self.recv_into(buffer, bufsiz) data = buffer[:bytes_read] return typing.cast(bytes, data) def recv_into( self, buffer: ctypes.Array[ctypes.c_char], nbytes: int | None = None ) -> int: # Read short on EOF. if self._real_closed: return 0 if nbytes is None: nbytes = len(buffer) buffer = (ctypes.c_char * nbytes).from_buffer(buffer) processed_bytes = ctypes.c_size_t(0) with self._raise_on_error(): result = Security.SSLRead( self.context, buffer, nbytes, ctypes.byref(processed_bytes) ) # There are some result codes that we want to treat as "not always # errors". Specifically, those are errSSLWouldBlock, # errSSLClosedGraceful, and errSSLClosedNoNotify. if result == SecurityConst.errSSLWouldBlock: # If we didn't process any bytes, then this was just a time out. # However, we can get errSSLWouldBlock in situations when we *did* # read some data, and in those cases we should just read "short" # and return. if processed_bytes.value == 0: # Timed out, no data read. raise socket.timeout("recv timed out") elif result in ( SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify, ): # The remote peer has closed this connection. We should do so as # well. Note that we don't actually return here because in # principle this could actually be fired along with return data. # It's unlikely though. self._real_close() else: _assert_no_error(result) # Ok, we read and probably succeeded. We should return whatever data # was actually read. return processed_bytes.value def settimeout(self, timeout: float) -> None: self._timeout = timeout def gettimeout(self) -> float | None: return self._timeout def send(self, data: bytes) -> int: processed_bytes = ctypes.c_size_t(0) with self._raise_on_error(): result = Security.SSLWrite( self.context, data, len(data), ctypes.byref(processed_bytes) ) if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: # Timed out raise socket.timeout("send timed out") else: _assert_no_error(result) # We sent, and probably succeeded. Tell them how much we sent. return processed_bytes.value def sendall(self, data: bytes) -> None: total_sent = 0 while total_sent < len(data): sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self) -> None: with self._raise_on_error(): Security.SSLClose(self.context) def close(self) -> None: self._closed = True # TODO: should I do clean shutdown here? Do I have to? 
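# _io_refs mirrors the reference counting the stdlib socket uses for
# makefile(): close() only marks the wrapper closed, and the real teardown
# runs once every file object created via makefile() has also been closed
# (each one calls _decref_socketios() on its way out).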
if self._io_refs <= 0: self._real_close() def _real_close(self) -> None: self._real_closed = True if self.context: CoreFoundation.CFRelease(self.context) self.context = None if self._client_cert_chain: CoreFoundation.CFRelease(self._client_cert_chain) self._client_cert_chain = None if self._keychain: Security.SecKeychainDelete(self._keychain) CoreFoundation.CFRelease(self._keychain) shutil.rmtree(self._keychain_dir) self._keychain = self._keychain_dir = None return self.socket.close() def getpeercert(self, binary_form: bool = False) -> bytes | None: # Urgh, annoying. # # Here's how we do this: # # 1. Call SSLCopyPeerTrust to get hold of the trust object for this # connection. # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. # 3. To get the CN, call SecCertificateCopyCommonName and process that # string so that it's of the appropriate type. # 4. To get the SAN, we need to do something a bit more complex: # a. Call SecCertificateCopyValues to get the data, requesting # kSecOIDSubjectAltName. # b. Mess about with this dictionary to try to get the SANs out. # # This is gross. Really gross. It's going to be a few hundred LoC extra # just to repeat something that SecureTransport can *already do*. So my # operating assumption at this time is that what we want to do is # instead to just flag to urllib3 that it shouldn't do its own hostname # validation when using SecureTransport. if not binary_form: raise ValueError("SecureTransport only supports dumping binary certs") trust = Security.SecTrustRef() certdata = None der_bytes = None try: # Grab the trust store. result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) _assert_no_error(result) if not trust: # Probably we haven't done the handshake yet. No biggie. return None cert_count = Security.SecTrustGetCertificateCount(trust) if not cert_count: # Also a case that might happen if we haven't handshaked. # Handshook? Handshaken? return None leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) assert leaf # Ok, now we want the DER bytes. certdata = Security.SecCertificateCopyData(leaf) assert certdata data_length = CoreFoundation.CFDataGetLength(certdata) data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) der_bytes = ctypes.string_at(data_buffer, data_length) finally: if certdata: CoreFoundation.CFRelease(certdata) if trust: CoreFoundation.CFRelease(trust) return der_bytes def version(self) -> str: protocol = Security.SSLProtocol() result = Security.SSLGetNegotiatedProtocolVersion( self.context, ctypes.byref(protocol) ) _assert_no_error(result) if protocol.value == SecurityConst.kTLSProtocol13: raise ssl.SSLError("SecureTransport does not support TLS 1.3") elif protocol.value == SecurityConst.kTLSProtocol12: return "TLSv1.2" elif protocol.value == SecurityConst.kTLSProtocol11: return "TLSv1.1" elif protocol.value == SecurityConst.kTLSProtocol1: return "TLSv1" elif protocol.value == SecurityConst.kSSLProtocol3: return "SSLv3" elif protocol.value == SecurityConst.kSSLProtocol2: return "SSLv2" else: raise ssl.SSLError(f"Unknown TLS version: {protocol!r}") def makefile( self: socket_cls, mode: ( Literal["r"] | Literal["w"] | Literal["rw"] | Literal["wr"] | Literal[""] ) = "r", buffering: int | None = None, *args: typing.Any, **kwargs: typing.Any, ) -> typing.BinaryIO | typing.TextIO: # We disable buffering with SecureTransport because it conflicts with # the buffering that ST does internally (see issue #1153 for more). 
buffering = 0 return socket_cls.makefile(self, mode, buffering, *args, **kwargs) WrappedSocket.makefile = makefile # type: ignore[attr-defined] class SecureTransportContext: """ I am a wrapper class for the SecureTransport library, to translate the interface of the standard library ``SSLContext`` object to calls into SecureTransport. """ def __init__(self, protocol: int) -> None: self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED if protocol not in (None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT): self._min_version, self._max_version = _protocol_to_min_max[protocol] self._options = 0 self._verify = False self._trust_bundle: bytes | None = None self._client_cert: str | None = None self._client_key: str | None = None self._client_key_passphrase = None self._alpn_protocols: list[bytes] | None = None @property def check_hostname(self) -> Literal[True]: """ SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. """ return True @check_hostname.setter def check_hostname(self, value: typing.Any) -> None: """ SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. """ @property def options(self) -> int: # TODO: Well, crap. # # So this is the bit of the code that is the most likely to cause us # trouble. Essentially we need to enumerate all of the SSL options that # users might want to use and try to see if we can sensibly translate # them, or whether we should just ignore them. return self._options @options.setter def options(self, value: int) -> None: # TODO: Update in line with above. self._options = value @property def verify_mode(self) -> int: return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE @verify_mode.setter def verify_mode(self, value: int) -> None: self._verify = value == ssl.CERT_REQUIRED def set_default_verify_paths(self) -> None: # So, this has to do something a bit weird. Specifically, what it does # is nothing. # # This means that, if we had previously had load_verify_locations # called, this does not undo that. We need to do that because it turns # out that the rest of the urllib3 code will attempt to load the # default verify paths if it hasn't been told about any paths, even if # the context itself was sometime earlier. We resolve that by just # ignoring it. pass def load_default_certs(self) -> None: return self.set_default_verify_paths() def set_ciphers(self, ciphers: typing.Any) -> None: raise ValueError("SecureTransport doesn't support custom cipher strings") def load_verify_locations( self, cafile: str | None = None, capath: str | None = None, cadata: bytes | None = None, ) -> None: # OK, we only really support cadata and cafile. if capath is not None: raise ValueError("SecureTransport does not support cert directories") # Raise if cafile does not exist. if cafile is not None: with open(cafile): pass self._trust_bundle = cafile or cadata # type: ignore[assignment] def load_cert_chain( self, certfile: str, keyfile: str | None = None, password: str | None = None, ) -> None: self._client_cert = certfile self._client_key = keyfile self._client_cert_passphrase = password def set_alpn_protocols(self, protocols: list[str | bytes]) -> None: """ Sets the ALPN protocols that will later be set on the context. Raises a NotImplementedError if ALPN is not supported. 
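Example (an illustrative sketch, not part of the original module;
assumes a client-side context and macOS 10.12+):

.. code-block:: python

    ctx = SecureTransportContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.set_alpn_protocols(["h2", "http/1.1"])  # preferred protocol first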
""" if not hasattr(Security, "SSLSetALPNProtocols"): raise NotImplementedError( "SecureTransport supports ALPN only in macOS 10.12+" ) self._alpn_protocols = [util.util.to_bytes(p, "ascii") for p in protocols] def wrap_socket( self, sock: socket_cls, server_side: bool = False, do_handshake_on_connect: bool = True, suppress_ragged_eofs: bool = True, server_hostname: bytes | str | None = None, ) -> WrappedSocket: # So, what do we do here? Firstly, we assert some properties. This is a # stripped down shim, so there is some functionality we don't support. # See PEP 543 for the real deal. assert not server_side assert do_handshake_on_connect assert suppress_ragged_eofs # Ok, we're good to go. Now we want to create the wrapped socket object # and store it in the appropriate place. wrapped_socket = WrappedSocket(sock) # Now we can handshake wrapped_socket.handshake( server_hostname, self._verify, self._trust_bundle, _tls_version_to_st[self._minimum_version], _tls_version_to_st[self._maximum_version], self._client_cert, self._client_key, self._client_key_passphrase, self._alpn_protocols, ) return wrapped_socket @property def minimum_version(self) -> int: return self._minimum_version @minimum_version.setter def minimum_version(self, minimum_version: int) -> None: self._minimum_version = minimum_version @property def maximum_version(self) -> int: return self._maximum_version @maximum_version.setter def maximum_version(self, maximum_version: int) -> None: self._maximum_version = maximum_version PKQZ潌##contrib/socks.pynu[""" This module contains provisional support for SOCKS proxies from within urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also supports the following SOCKS features: - SOCKS4A (``proxy_url='socks4a://...``) - SOCKS4 (``proxy_url='socks4://...``) - SOCKS5 with remote DNS (``proxy_url='socks5h://...``) - SOCKS5 with local DNS (``proxy_url='socks5://...``) - Usernames and passwords for the SOCKS proxy .. note:: It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in your ``proxy_url`` to ensure that DNS resolution is done from the remote server instead of client-side when connecting to a domain name. SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 supports IPv4, IPv6, and domain names. When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` will be sent as the ``userid`` section of the SOCKS request: .. code-block:: python proxy_url="socks4a://@proxy-host" When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion of the ``proxy_url`` will be sent as the username/password to authenticate with the proxy: .. code-block:: python proxy_url="socks5h://:@proxy-host" """ from __future__ import annotations try: import socks # type: ignore[import] except ImportError: import warnings from ..exceptions import DependencyWarning warnings.warn( ( "SOCKS support in urllib3 requires the installation of optional " "dependencies: specifically, PySocks. 
For more information, see " "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies" ), DependencyWarning, ) raise import typing from socket import timeout as SocketTimeout from ..connection import HTTPConnection, HTTPSConnection from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool from ..exceptions import ConnectTimeoutError, NewConnectionError from ..poolmanager import PoolManager from ..util.url import parse_url try: import ssl except ImportError: ssl = None # type: ignore[assignment] try: from typing import TypedDict class _TYPE_SOCKS_OPTIONS(TypedDict): socks_version: int proxy_host: str | None proxy_port: str | None username: str | None password: str | None rdns: bool except ImportError: # Python 3.7 _TYPE_SOCKS_OPTIONS = typing.Dict[str, typing.Any] # type: ignore[misc, assignment] class SOCKSConnection(HTTPConnection): """ A plain-text HTTP connection that connects via a SOCKS proxy. """ def __init__( self, _socks_options: _TYPE_SOCKS_OPTIONS, *args: typing.Any, **kwargs: typing.Any, ) -> None: self._socks_options = _socks_options super().__init__(*args, **kwargs) def _new_conn(self) -> socks.socksocket: """ Establish a new connection via the SOCKS proxy. """ extra_kw: dict[str, typing.Any] = {} if self.source_address: extra_kw["source_address"] = self.source_address if self.socket_options: extra_kw["socket_options"] = self.socket_options try: conn = socks.create_connection( (self.host, self.port), proxy_type=self._socks_options["socks_version"], proxy_addr=self._socks_options["proxy_host"], proxy_port=self._socks_options["proxy_port"], proxy_username=self._socks_options["username"], proxy_password=self._socks_options["password"], proxy_rdns=self._socks_options["rdns"], timeout=self.timeout, **extra_kw, ) except SocketTimeout as e: raise ConnectTimeoutError( self, f"Connection to {self.host} timed out. (connect timeout={self.timeout})", ) from e except socks.ProxyError as e: # This is fragile as hell, but it seems to be the only way to raise # useful errors here. if e.socket_err: error = e.socket_err if isinstance(error, SocketTimeout): raise ConnectTimeoutError( self, f"Connection to {self.host} timed out. (connect timeout={self.timeout})", ) from e else: # Adding `from e` messes with coverage somehow, so it's omitted. # See #2386. raise NewConnectionError( self, f"Failed to establish a new connection: {error}" ) else: raise NewConnectionError( self, f"Failed to establish a new connection: {e}" ) from e except OSError as e: # Defensive: PySocks should catch all these. raise NewConnectionError( self, f"Failed to establish a new connection: {e}" ) from e return conn # We don't need to duplicate the Verified/Unverified distinction from # urllib3/connection.py here because the HTTPSConnection will already have been # correctly set to either the Verified or Unverified form by that module. This # means the SOCKSHTTPSConnection will automatically be the correct type. class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): pass class SOCKSHTTPConnectionPool(HTTPConnectionPool): ConnectionCls = SOCKSConnection class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): ConnectionCls = SOCKSHTTPSConnection class SOCKSProxyManager(PoolManager): """ A version of the urllib3 ProxyManager that routes connections via the defined SOCKS proxy. 
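Example (a minimal usage sketch; the proxy URL is hypothetical):

.. code-block:: python

    from urllib3.contrib.socks import SOCKSProxyManager

    proxy = SOCKSProxyManager("socks5h://localhost:8889/")
    resp = proxy.request("GET", "https://example.com/")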
""" pool_classes_by_scheme = { "http": SOCKSHTTPConnectionPool, "https": SOCKSHTTPSConnectionPool, } def __init__( self, proxy_url: str, username: str | None = None, password: str | None = None, num_pools: int = 10, headers: typing.Mapping[str, str] | None = None, **connection_pool_kw: typing.Any, ): parsed = parse_url(proxy_url) if username is None and password is None and parsed.auth is not None: split = parsed.auth.split(":") if len(split) == 2: username, password = split if parsed.scheme == "socks5": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = False elif parsed.scheme == "socks5h": socks_version = socks.PROXY_TYPE_SOCKS5 rdns = True elif parsed.scheme == "socks4": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = False elif parsed.scheme == "socks4a": socks_version = socks.PROXY_TYPE_SOCKS4 rdns = True else: raise ValueError(f"Unable to determine SOCKS version from {proxy_url}") self.proxy_url = proxy_url socks_options = { "socks_version": socks_version, "proxy_host": parsed.host, "proxy_port": parsed.port, "username": username, "password": password, "rdns": rdns, } connection_pool_kw["_socks_options"] = socks_options super().__init__(num_pools, headers, **connection_pool_kw) self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme PKQZ#$$ exceptions.pynu[from __future__ import annotations import socket import typing import warnings from email.errors import MessageDefect from http.client import IncompleteRead as httplib_IncompleteRead if typing.TYPE_CHECKING: from .connection import HTTPConnection from .connectionpool import ConnectionPool from .response import HTTPResponse from .util.retry import Retry # Base Exceptions class HTTPError(Exception): """Base exception used by this module.""" class HTTPWarning(Warning): """Base warning used by this module.""" _TYPE_REDUCE_RESULT = typing.Tuple[ typing.Callable[..., object], typing.Tuple[object, ...] ] class PoolError(HTTPError): """Base exception for errors caused within a pool.""" def __init__(self, pool: ConnectionPool, message: str) -> None: self.pool = pool super().__init__(f"{pool}: {message}") def __reduce__(self) -> _TYPE_REDUCE_RESULT: # For pickling purposes. return self.__class__, (None, None) class RequestError(PoolError): """Base exception for PoolErrors that have associated URLs.""" def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: self.url = url super().__init__(pool, message) def __reduce__(self) -> _TYPE_REDUCE_RESULT: # For pickling purposes. return self.__class__, (None, self.url, None) class SSLError(HTTPError): """Raised when SSL certificate fails in an HTTPS connection.""" class ProxyError(HTTPError): """Raised when the connection to a proxy fails.""" # The original error is also available as __cause__. original_error: Exception def __init__(self, message: str, error: Exception) -> None: super().__init__(message, error) self.original_error = error class DecodeError(HTTPError): """Raised when automatic decoding based on Content-Type fails.""" class ProtocolError(HTTPError): """Raised when something unexpected happens mid-request/response.""" #: Renamed to ProtocolError but aliased for backwards compatibility. ConnectionError = ProtocolError # Leaf Exceptions class MaxRetryError(RequestError): """Raised when the maximum number of retries is exceeded. 
:param pool: The connection pool :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` :param str url: The requested Url :param reason: The underlying error :type reason: :class:`Exception` """ def __init__( self, pool: ConnectionPool, url: str, reason: Exception | None = None ) -> None: self.reason = reason message = f"Max retries exceeded with url: {url} (Caused by {reason!r})" super().__init__(pool, url, message) class HostChangedError(RequestError): """Raised when an existing pool gets a request for a foreign host.""" def __init__( self, pool: ConnectionPool, url: str, retries: Retry | int = 3 ) -> None: message = f"Tried to open a foreign host with url: {url}" super().__init__(pool, url, message) self.retries = retries class TimeoutStateError(HTTPError): """Raised when passing an invalid state to a timeout""" class TimeoutError(HTTPError): """Raised when a socket timeout error occurs. Catching this error will catch both :exc:`ReadTimeoutErrors ` and :exc:`ConnectTimeoutErrors `. """ class ReadTimeoutError(TimeoutError, RequestError): """Raised when a socket timeout occurs while receiving data from a server""" # This timeout error does not have a URL attached and needs to inherit from the # base HTTPError class ConnectTimeoutError(TimeoutError): """Raised when a socket timeout occurs while connecting to a server""" class NewConnectionError(ConnectTimeoutError, HTTPError): """Raised when we fail to establish a new connection. Usually ECONNREFUSED.""" def __init__(self, conn: HTTPConnection, message: str) -> None: self.conn = conn super().__init__(f"{conn}: {message}") @property def pool(self) -> HTTPConnection: warnings.warn( "The 'pool' property is deprecated and will be removed " "in urllib3 v2.1.0. Use 'conn' instead.", DeprecationWarning, stacklevel=2, ) return self.conn class NameResolutionError(NewConnectionError): """Raised when host name resolution fails.""" def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror): message = f"Failed to resolve '{host}' ({reason})" super().__init__(conn, message) class EmptyPoolError(PoolError): """Raised when a pool runs out of connections and no more are allowed.""" class FullPoolError(PoolError): """Raised when we try to add a connection to a full pool in blocking mode.""" class ClosedPoolError(PoolError): """Raised when a request enters a pool after the pool has been closed.""" class LocationValueError(ValueError, HTTPError): """Raised when there is something wrong with a given URL input.""" class LocationParseError(LocationValueError): """Raised when get_host or similar fails to parse the URL input.""" def __init__(self, location: str) -> None: message = f"Failed to parse: {location}" super().__init__(message) self.location = location class URLSchemeUnknown(LocationValueError): """Raised when a URL input has an unsupported scheme.""" def __init__(self, scheme: str): message = f"Not supported URL scheme {scheme}" super().__init__(message) self.scheme = scheme class ResponseError(HTTPError): """Used as a container for an error reason supplied in a MaxRetryError.""" GENERIC_ERROR = "too many error responses" SPECIFIC_ERROR = "too many {status_code} error responses" class SecurityWarning(HTTPWarning): """Warned when performing security reducing actions""" class InsecureRequestWarning(SecurityWarning): """Warned when making an unverified HTTPS request.""" class NotOpenSSLWarning(SecurityWarning): """Warned when using unsupported SSL library""" class SystemTimeWarning(SecurityWarning): """Warned when system time is 
suspected to be wrong""" class InsecurePlatformWarning(SecurityWarning): """Warned when certain TLS/SSL configuration is not available on a platform.""" class DependencyWarning(HTTPWarning): """ Warned when an attempt is made to import a module with missing optional dependencies. """ class ResponseNotChunked(ProtocolError, ValueError): """Response needs to be chunked in order to read it as chunks.""" class BodyNotHttplibCompatible(HTTPError): """ Body should be :class:`http.client.HTTPResponse` like (have an fp attribute which returns raw chunks) for read_chunked(). """ class IncompleteRead(HTTPError, httplib_IncompleteRead): """ Response length doesn't match expected Content-Length Subclass of :class:`http.client.IncompleteRead` to allow int value for ``partial`` to avoid creating large objects on streamed reads. """ def __init__(self, partial: int, expected: int) -> None: self.partial = partial # type: ignore[assignment] self.expected = expected def __repr__(self) -> str: return "IncompleteRead(%i bytes read, %i more expected)" % ( self.partial, # type: ignore[str-format] self.expected, ) class InvalidChunkLength(HTTPError, httplib_IncompleteRead): """Invalid chunk length in a chunked response.""" def __init__(self, response: HTTPResponse, length: bytes) -> None: self.partial: int = response.tell() # type: ignore[assignment] self.expected: int | None = response.length_remaining self.response = response self.length = length def __repr__(self) -> str: return "InvalidChunkLength(got length %r, %i bytes read)" % ( self.length, self.partial, ) class InvalidHeader(HTTPError): """The header provided was somehow invalid.""" class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): """ProxyManager does not support the supplied scheme""" # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. def __init__(self, scheme: str | None) -> None: # 'localhost' is here because our URL parser parses # localhost:8080 -> scheme=localhost, remove if we fix this. if scheme == "localhost": scheme = None if scheme is None: message = "Proxy URL had no scheme, should start with http:// or https://" else: message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://" super().__init__(message) class ProxySchemeUnsupported(ValueError): """Fetching HTTPS resources through HTTPS proxies is unsupported""" class HeaderParsingError(HTTPError): """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" def __init__( self, defects: list[MessageDefect], unparsed_data: bytes | str | None ) -> None: message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}" super().__init__(message) class UnrewindableBodyError(HTTPError): """urllib3 encountered an error when trying to rewind a body""" PKQZoLs++ fields.pynu[from __future__ import annotations import email.utils import mimetypes import typing _TYPE_FIELD_VALUE = typing.Union[str, bytes] _TYPE_FIELD_VALUE_TUPLE = typing.Union[ _TYPE_FIELD_VALUE, typing.Tuple[str, _TYPE_FIELD_VALUE], typing.Tuple[str, _TYPE_FIELD_VALUE, str], ] def guess_content_type( filename: str | None, default: str = "application/octet-stream" ) -> str: """ Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. 
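Example (illustrative):

.. code-block:: python

    from urllib3.fields import guess_content_type

    guess_content_type("photo.jpg")  # "image/jpeg"
    guess_content_type("README")     # no guess, falls back to the default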
""" if filename: return mimetypes.guess_type(filename)[0] or default return default def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str: """ Helper function to format and quote a single header parameter using the strategy defined in RFC 2231. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows `RFC 2388 Section 4.4 `_. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as ``bytes`` or `str``. :returns: An RFC-2231-formatted unicode string. .. deprecated:: 2.0.0 Will be removed in urllib3 v2.1.0. This is not valid for ``multipart/form-data`` header parameters. """ import warnings warnings.warn( "'format_header_param_rfc2231' is deprecated and will be " "removed in urllib3 v2.1.0. This is not valid for " "multipart/form-data header parameters.", DeprecationWarning, stacklevel=2, ) if isinstance(value, bytes): value = value.decode("utf-8") if not any(ch in value for ch in '"\\\r\n'): result = f'{name}="{value}"' try: result.encode("ascii") except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result value = email.utils.encode_rfc2231(value, "utf-8") value = f"{name}*={value}" return value def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: """ Format and quote a single multipart header parameter. This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching the behavior of current browser and curl versions. Values are assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are percent encoded. .. _WHATWG HTML Standard: https://html.spec.whatwg.org/multipage/ form-control-infrastructure.html#multipart-form-data :param name: The name of the parameter, an ASCII-only ``str``. :param value: The value of the parameter, a ``str`` or UTF-8 encoded ``bytes``. :returns: A string ``name="value"`` with the escaped value. .. versionchanged:: 2.0.0 Matches the WHATWG HTML Standard as of 2021/06/10. Control characters are no longer percent encoded. .. versionchanged:: 2.0.0 Renamed from ``format_header_param_html5`` and ``format_header_param``. The old names will be removed in urllib3 v2.1.0. """ if isinstance(value, bytes): value = value.decode("utf-8") # percent encode \n \r " value = value.translate({10: "%0A", 13: "%0D", 34: "%22"}) return f'{name}="{value}"' def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str: """ .. deprecated:: 2.0.0 Renamed to :func:`format_multipart_header_param`. Will be removed in urllib3 v2.1.0. """ import warnings warnings.warn( "'format_header_param_html5' has been renamed to " "'format_multipart_header_param'. The old name will be " "removed in urllib3 v2.1.0.", DeprecationWarning, stacklevel=2, ) return format_multipart_header_param(name, value) def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: """ .. deprecated:: 2.0.0 Renamed to :func:`format_multipart_header_param`. Will be removed in urllib3 v2.1.0. """ import warnings warnings.warn( "'format_header_param' has been renamed to " "'format_multipart_header_param'. The old name will be " "removed in urllib3 v2.1.0.", DeprecationWarning, stacklevel=2, ) return format_multipart_header_param(name, value) class RequestField: """ A data container for request body parameters. :param name: The name of this request field. Must be unicode. :param data: The data/value body. :param filename: An optional filename of the request field. Must be unicode. 
:param headers: An optional dict-like object of headers to initially use for the field. .. versionchanged:: 2.0.0 The ``header_formatter`` parameter is deprecated and will be removed in urllib3 v2.1.0. """ def __init__( self, name: str, data: _TYPE_FIELD_VALUE, filename: str | None = None, headers: typing.Mapping[str, str] | None = None, header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, ): self._name = name self._filename = filename self.data = data self.headers: dict[str, str | None] = {} if headers: self.headers = dict(headers) if header_formatter is not None: import warnings warnings.warn( "The 'header_formatter' parameter is deprecated and " "will be removed in urllib3 v2.1.0.", DeprecationWarning, stacklevel=2, ) self.header_formatter = header_formatter else: self.header_formatter = format_multipart_header_param @classmethod def from_tuples( cls, fieldname: str, value: _TYPE_FIELD_VALUE_TUPLE, header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, ) -> RequestField: """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """ filename: str | None content_type: str | None data: _TYPE_FIELD_VALUE if isinstance(value, tuple): if len(value) == 3: filename, data, content_type = typing.cast( typing.Tuple[str, _TYPE_FIELD_VALUE, str], value ) else: filename, data = typing.cast( typing.Tuple[str, _TYPE_FIELD_VALUE], value ) content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls( fieldname, data, filename=filename, header_formatter=header_formatter ) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str: """ Override this method to change how each multipart header parameter is formatted. By default, this calls :func:`format_multipart_header_param`. :param name: The name of the parameter, an ASCII-only ``str``. :param value: The value of the parameter, a ``str`` or UTF-8 encoded ``bytes``. :meta public: """ return self.header_formatter(name, value) def _render_parts( self, header_parts: ( dict[str, _TYPE_FIELD_VALUE | None] | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]] ), ) -> str: """ Helper function to format and quote a single header. Useful for single headers that are composed of multiple items. E.g., 'Content-Disposition' fields. :param header_parts: A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format as `k1="v1"; k2="v2"; ...`. """ iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]] parts = [] if isinstance(header_parts, dict): iterable = header_parts.items() else: iterable = header_parts for name, value in iterable: if value is not None: parts.append(self._render_part(name, value)) return "; ".join(parts) def render_headers(self) -> str: """ Renders the headers for this request field. 
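Example (an illustrative sketch using only this class):

.. code-block:: python

    from urllib3.fields import RequestField

    field = RequestField("name", "example data")
    field.make_multipart()
    field.render_headers()
    # 'Content-Disposition: form-data; name="name"\r\n\r\n'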
""" lines = [] sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] for sort_key in sort_keys: if self.headers.get(sort_key, False): lines.append(f"{sort_key}: {self.headers[sort_key]}") for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: lines.append(f"{header_name}: {header_value}") lines.append("\r\n") return "\r\n".join(lines) def make_multipart( self, content_disposition: str | None = None, content_type: str | None = None, content_location: str | None = None, ) -> None: """ Makes this request field into a multipart request field. This method overrides "Content-Disposition", "Content-Type" and "Content-Location" headers to the request parameter. :param content_disposition: The 'Content-Disposition' of the request body. Defaults to 'form-data' :param content_type: The 'Content-Type' of the request body. :param content_location: The 'Content-Location' of the request body. """ content_disposition = (content_disposition or "form-data") + "; ".join( [ "", self._render_parts( (("name", self._name), ("filename", self._filename)) ), ] ) self.headers["Content-Disposition"] = content_disposition self.headers["Content-Type"] = content_type self.headers["Content-Location"] = content_location PKQZ洖[ [ filepost.pynu[from __future__ import annotations import binascii import codecs import os import typing from io import BytesIO from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField writer = codecs.lookup("utf-8")[3] _TYPE_FIELDS_SEQUENCE = typing.Sequence[ typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] ] _TYPE_FIELDS = typing.Union[ _TYPE_FIELDS_SEQUENCE, typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE], ] def choose_boundary() -> str: """ Our embarrassingly-simple replacement for mimetools.choose_boundary. """ return binascii.hexlify(os.urandom(16)).decode() def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]: """ Iterate over fields. Supports list of (k, v) tuples and dicts, and lists of :class:`~urllib3.fields.RequestField`. """ iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]] if isinstance(fields, typing.Mapping): iterable = fields.items() else: iterable = fields for field in iterable: if isinstance(field, RequestField): yield field else: yield RequestField.from_tuples(*field) def encode_multipart_formdata( fields: _TYPE_FIELDS, boundary: str | None = None ) -> tuple[bytes, str]: """ Encode a dictionary of ``fields`` using the multipart/form-data MIME format. :param fields: Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). Values are processed by :func:`urllib3.fields.RequestField.from_tuples`. :param boundary: If not specified, then a random boundary will be generated using :func:`urllib3.filepost.choose_boundary`. 
""" body = BytesIO() if boundary is None: boundary = choose_boundary() for field in iter_field_objects(fields): body.write(f"--{boundary}\r\n".encode("latin-1")) writer(body).write(field.render_headers()) data = field.data if isinstance(data, int): data = str(data) # Backwards compatibility if isinstance(data, str): writer(body).write(data) else: body.write(data) body.write(b"\r\n") body.write(f"--{boundary}--\r\n".encode("latin-1")) content_type = f"multipart/form-data; boundary={boundary}" return body.getvalue(), content_type PKQZ3xXxXpoolmanager.pynu[from __future__ import annotations import functools import logging import typing import warnings from types import TracebackType from urllib.parse import urljoin from ._collections import RecentlyUsedContainer from ._request_methods import RequestMethods from .connection import ProxyConfig from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme from .exceptions import ( LocationValueError, MaxRetryError, ProxySchemeUnknown, URLSchemeUnknown, ) from .response import BaseHTTPResponse from .util.connection import _TYPE_SOCKET_OPTIONS from .util.proxy import connection_requires_http_tunnel from .util.retry import Retry from .util.timeout import Timeout from .util.url import Url, parse_url if typing.TYPE_CHECKING: import ssl from typing_extensions import Literal __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] log = logging.getLogger(__name__) SSL_KEYWORDS = ( "key_file", "cert_file", "cert_reqs", "ca_certs", "ssl_version", "ssl_minimum_version", "ssl_maximum_version", "ca_cert_dir", "ssl_context", "key_password", "server_hostname", ) # Default value for `blocksize` - a new parameter introduced to # http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 _DEFAULT_BLOCKSIZE = 16384 _SelfT = typing.TypeVar("_SelfT") class PoolKey(typing.NamedTuple): """ All known keyword arguments that could be provided to the pool manager, its pools, or the underlying connections. All custom key schemes should include the fields in this key at a minimum. """ key_scheme: str key_host: str key_port: int | None key_timeout: Timeout | float | int | None key_retries: Retry | bool | int | None key_block: bool | None key_source_address: tuple[str, int] | None key_key_file: str | None key_key_password: str | None key_cert_file: str | None key_cert_reqs: str | None key_ca_certs: str | None key_ssl_version: int | str | None key_ssl_minimum_version: ssl.TLSVersion | None key_ssl_maximum_version: ssl.TLSVersion | None key_ca_cert_dir: str | None key_ssl_context: ssl.SSLContext | None key_maxsize: int | None key_headers: frozenset[tuple[str, str]] | None key__proxy: Url | None key__proxy_headers: frozenset[tuple[str, str]] | None key__proxy_config: ProxyConfig | None key_socket_options: _TYPE_SOCKET_OPTIONS | None key__socks_options: frozenset[tuple[str, str]] | None key_assert_hostname: bool | str | None key_assert_fingerprint: str | None key_server_hostname: str | None key_blocksize: int | None def _default_key_normalizer( key_class: type[PoolKey], request_context: dict[str, typing.Any] ) -> PoolKey: """ Create a pool key out of a request context dictionary. According to RFC 3986, both the scheme and host are case-insensitive. Therefore, this function normalizes both before constructing the pool key for an HTTPS request. If you wish to change this behaviour, provide alternate callables to ``key_fn_by_scheme``. :param key_class: The class to use when constructing the key. 
This should be a namedtuple with the ``scheme`` and ``host`` keys at a minimum. :type key_class: namedtuple :param request_context: A dictionary-like object that contain the context for a request. :type request_context: dict :return: A namedtuple that can be used as a connection pool key. :rtype: PoolKey """ # Since we mutate the dictionary, make a copy first context = request_context.copy() context["scheme"] = context["scheme"].lower() context["host"] = context["host"].lower() # These are both dictionaries and need to be transformed into frozensets for key in ("headers", "_proxy_headers", "_socks_options"): if key in context and context[key] is not None: context[key] = frozenset(context[key].items()) # The socket_options key may be a list and needs to be transformed into a # tuple. socket_opts = context.get("socket_options") if socket_opts is not None: context["socket_options"] = tuple(socket_opts) # Map the kwargs to the names in the namedtuple - this is necessary since # namedtuples can't have fields starting with '_'. for key in list(context.keys()): context["key_" + key] = context.pop(key) # Default to ``None`` for keys missing from the context for field in key_class._fields: if field not in context: context[field] = None # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context if context.get("key_blocksize") is None: context["key_blocksize"] = _DEFAULT_BLOCKSIZE return key_class(**context) #: A dictionary that maps a scheme to a callable that creates a pool key. #: This can be used to alter the way pool keys are constructed, if desired. #: Each PoolManager makes a copy of this dictionary so they can be configured #: globally here, or individually on the instance. key_fn_by_scheme = { "http": functools.partial(_default_key_normalizer, PoolKey), "https": functools.partial(_default_key_normalizer, PoolKey), } pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} class PoolManager(RequestMethods): """ Allows for arbitrary requests while transparently keeping track of necessary connection pools for you. :param num_pools: Number of connection pools to cache before discarding the least recently used pool. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param \\**connection_pool_kw: Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. Example: .. code-block:: python import urllib3 http = urllib3.PoolManager(num_pools=2) resp1 = http.request("GET", "https://google.com/") resp2 = http.request("GET", "https://google.com/mail") resp3 = http.request("GET", "https://yahoo.com/") print(len(http.pools)) # 2 """ proxy: Url | None = None proxy_config: ProxyConfig | None = None def __init__( self, num_pools: int = 10, headers: typing.Mapping[str, str] | None = None, **connection_pool_kw: typing.Any, ) -> None: super().__init__(headers) self.connection_pool_kw = connection_pool_kw self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool] self.pools = RecentlyUsedContainer(num_pools) # Locally set the pool classes and keys so other PoolManagers can # override them. 
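# pool_classes_by_scheme maps a scheme to the ConnectionPool subclass
# to instantiate; key_fn_by_scheme maps a scheme to the callable that
# turns a request context into a hashable PoolKey. The latter is
# copied so per-instance tweaks don't leak into the module defaults.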
self.pool_classes_by_scheme = pool_classes_by_scheme self.key_fn_by_scheme = key_fn_by_scheme.copy() def __enter__(self: _SelfT) -> _SelfT: return self def __exit__( self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None, ) -> Literal[False]: self.clear() # Return False to re-raise any potential exceptions return False def _new_pool( self, scheme: str, host: str, port: int, request_context: dict[str, typing.Any] | None = None, ) -> HTTPConnectionPool: """ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and any additional pool keyword arguments. If ``request_context`` is provided, it is provided as keyword arguments to the pool class used. This method is used to actually create the connection pools handed out by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. """ pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme] if request_context is None: request_context = self.connection_pool_kw.copy() # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly # set to 'None' in the request_context. if request_context.get("blocksize") is None: request_context["blocksize"] = _DEFAULT_BLOCKSIZE # Although the context has everything necessary to create the pool, # this function has historically only used the scheme, host, and port # in the positional args. When an API change is acceptable these can # be removed. for key in ("scheme", "host", "port"): request_context.pop(key, None) if scheme == "http": for kw in SSL_KEYWORDS: request_context.pop(kw, None) return pool_cls(host, port, **request_context) def clear(self) -> None: """ Empty our store of pools and direct them all to close. This will not affect in-flight connections, but they will not be re-used after completion. """ self.pools.clear() def connection_from_host( self, host: str | None, port: int | None = None, scheme: str | None = "http", pool_kwargs: dict[str, typing.Any] | None = None, ) -> HTTPConnectionPool: """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context["scheme"] = scheme or "http" if not port: port = port_by_scheme.get(request_context["scheme"].lower(), 80) request_context["port"] = port request_context["host"] = host return self.connection_from_context(request_context) def connection_from_context( self, request_context: dict[str, typing.Any] ) -> HTTPConnectionPool: """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. """ if "strict" in request_context: warnings.warn( "The 'strict' parameter is no longer needed on Python 3+. 
" "This will raise an error in urllib3 v2.1.0.", DeprecationWarning, ) request_context.pop("strict") scheme = request_context["scheme"].lower() pool_key_constructor = self.key_fn_by_scheme.get(scheme) if not pool_key_constructor: raise URLSchemeUnknown(scheme) pool_key = pool_key_constructor(request_context) return self.connection_from_pool_key(pool_key, request_context=request_context) def connection_from_pool_key( self, pool_key: PoolKey, request_context: dict[str, typing.Any] ) -> HTTPConnectionPool: """ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and ``port`` fields. """ with self.pools.lock: # If the scheme, host, or port doesn't match existing open # connections, open a new ConnectionPool. pool = self.pools.get(pool_key) if pool: return pool # Make a fresh ConnectionPool of the desired type scheme = request_context["scheme"] host = request_context["host"] port = request_context["port"] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool return pool def connection_from_url( self, url: str, pool_kwargs: dict[str, typing.Any] | None = None ) -> HTTPConnectionPool: """ Similar to :func:`urllib3.connectionpool.connection_from_url`. If ``pool_kwargs`` is not provided and a new pool needs to be constructed, ``self.connection_pool_kw`` is used to initialize the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` is provided, it is used instead. Note that if a new pool does not need to be created for the request, the provided ``pool_kwargs`` are not used. """ u = parse_url(url) return self.connection_from_host( u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs ) def _merge_pool_kwargs( self, override: dict[str, typing.Any] | None ) -> dict[str, typing.Any]: """ Merge a dictionary of override values for self.connection_pool_kw. This does not modify self.connection_pool_kw and returns a new dict. Any keys in the override dictionary with a value of ``None`` are removed from the merged dictionary. """ base_pool_kwargs = self.connection_pool_kw.copy() if override: for key, value in override.items(): if value is None: try: del base_pool_kwargs[key] except KeyError: pass else: base_pool_kwargs[key] = value return base_pool_kwargs def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool: """ Indicates if the proxy requires the complete destination URL in the request. Normally this is only needed when not using an HTTP CONNECT tunnel. """ if self.proxy is None: return False return not connection_requires_http_tunnel( self.proxy, self.proxy_config, parsed_url.scheme ) def urlopen( # type: ignore[override] self, method: str, url: str, redirect: bool = True, **kw: typing.Any ) -> BaseHTTPResponse: """ Same as :meth:`urllib3.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. The given ``url`` parameter must be absolute, such that an appropriate :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. """ u = parse_url(url) if u.scheme is None: warnings.warn( "URLs without a scheme (ie 'https://') are deprecated and will raise an error " "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs " "start with 'https://' or 'http://'. 
Read more in this issue: " "https://github.com/urllib3/urllib3/issues/2920", category=DeprecationWarning, stacklevel=2, ) conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) kw["assert_same_host"] = False kw["redirect"] = False if "headers" not in kw: kw["headers"] = self.headers if self._proxy_requires_url_absolute_form(u): response = conn.urlopen(method, url, **kw) else: response = conn.urlopen(method, u.request_uri, **kw) redirect_location = redirect and response.get_redirect_location() if not redirect_location: return response # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) # RFC 7231, Section 6.4.4 if response.status == 303: method = "GET" retries = kw.get("retries") if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) # Strip headers marked as unsafe to forward to the redirected location. # Check remove_headers_on_redirect to avoid a potential network call within # conn.is_same_host() which may use socket.gethostbyname() in the future. if retries.remove_headers_on_redirect and not conn.is_same_host( redirect_location ): new_headers = kw["headers"].copy() for header in kw["headers"]: if header.lower() in retries.remove_headers_on_redirect: new_headers.pop(header, None) kw["headers"] = new_headers try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: if retries.raise_on_redirect: response.drain_conn() raise return response kw["retries"] = retries kw["redirect"] = redirect log.info("Redirecting %s -> %s", url, redirect_location) response.drain_conn() return self.urlopen(method, redirect_location, **kw) class ProxyManager(PoolManager): """ Behaves just like :class:`PoolManager`, but sends all requests through the defined proxy, using the CONNECT method for HTTPS URLs. :param proxy_url: The URL of the proxy to be used. :param proxy_headers: A dictionary containing headers that will be sent to the proxy. In case of HTTP they are being sent with each request, while in the HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. :param proxy_ssl_context: The proxy SSL context is used to establish the TLS connection to the proxy when using HTTPS proxies. :param use_forwarding_for_https: (Defaults to False) If set to True will forward requests to the HTTPS proxy to be made on behalf of the client instead of creating a TLS tunnel via the CONNECT method. **Enabling this flag means that request and response headers and content will be visible from the HTTPS proxy** whereas tunneling keeps request and response headers and content private. IP address, target hostname, SNI, and port are always visible to an HTTPS proxy even when this flag is disabled. :param proxy_assert_hostname: The hostname of the certificate to verify against. :param proxy_assert_fingerprint: The fingerprint of the certificate to verify against. Example: .. 
code-block:: python import urllib3 proxy = urllib3.ProxyManager("https://localhost:3128/") resp1 = proxy.request("GET", "https://google.com/") resp2 = proxy.request("GET", "https://httpbin.org/") print(len(proxy.pools)) # 1 resp3 = proxy.request("GET", "https://httpbin.org/") resp4 = proxy.request("GET", "https://twitter.com/") print(len(proxy.pools)) # 3 """ def __init__( self, proxy_url: str, num_pools: int = 10, headers: typing.Mapping[str, str] | None = None, proxy_headers: typing.Mapping[str, str] | None = None, proxy_ssl_context: ssl.SSLContext | None = None, use_forwarding_for_https: bool = False, proxy_assert_hostname: None | str | Literal[False] = None, proxy_assert_fingerprint: str | None = None, **connection_pool_kw: typing.Any, ) -> None: if isinstance(proxy_url, HTTPConnectionPool): str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}" else: str_proxy_url = proxy_url proxy = parse_url(str_proxy_url) if proxy.scheme not in ("http", "https"): raise ProxySchemeUnknown(proxy.scheme) if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) self.proxy = proxy self.proxy_headers = proxy_headers or {} self.proxy_ssl_context = proxy_ssl_context self.proxy_config = ProxyConfig( proxy_ssl_context, use_forwarding_for_https, proxy_assert_hostname, proxy_assert_fingerprint, ) connection_pool_kw["_proxy"] = self.proxy connection_pool_kw["_proxy_headers"] = self.proxy_headers connection_pool_kw["_proxy_config"] = self.proxy_config super().__init__(num_pools, headers, **connection_pool_kw) def connection_from_host( self, host: str | None, port: int | None = None, scheme: str | None = "http", pool_kwargs: dict[str, typing.Any] | None = None, ) -> HTTPConnectionPool: if scheme == "https": return super().connection_from_host( host, port, scheme, pool_kwargs=pool_kwargs ) return super().connection_from_host( self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr] ) def _set_proxy_headers( self, url: str, headers: typing.Mapping[str, str] | None = None ) -> typing.Mapping[str, str]: """ Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. """ headers_ = {"Accept": "*/*"} netloc = parse_url(url).netloc if netloc: headers_["Host"] = netloc if headers: headers_.update(headers) return headers_ def urlopen( # type: ignore[override] self, method: str, url: str, redirect: bool = True, **kw: typing.Any ) -> BaseHTTPResponse: "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): # For connections using HTTP CONNECT, httplib sets the necessary # headers on the CONNECT to the proxy. If we're not using CONNECT, # we'll definitely need to set 'Host' at the very least. headers = kw.get("headers", self.headers) kw["headers"] = self._set_proxy_headers(url, headers) return super().urlopen(method, url, redirect=redirect, **kw) def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager: return ProxyManager(proxy_url=url, **kw) PKQZ]]py.typednu[# Instruct type checkers to look for inline type annotations in this package. # See PEP 561. 
PKQZJ response.pynu[from __future__ import annotations import collections import io import json as _json import logging import re import sys import typing import warnings import zlib from contextlib import contextmanager from http.client import HTTPMessage as _HttplibHTTPMessage from http.client import HTTPResponse as _HttplibHTTPResponse from socket import timeout as SocketTimeout try: try: import brotlicffi as brotli # type: ignore[import] except ImportError: import brotli # type: ignore[import] except ImportError: brotli = None try: import zstandard as zstd # type: ignore[import] # The package 'zstandard' added the 'eof' property starting # in v0.18.0 which we require to ensure a complete and # valid zstd stream was fed into the ZstdDecoder. # See: https://github.com/urllib3/urllib3/pull/2624 _zstd_version = _zstd_version = tuple( map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] ) if _zstd_version < (0, 18): # Defensive: zstd = None except (AttributeError, ImportError, ValueError): # Defensive: zstd = None from . import util from ._base_connection import _TYPE_BODY from ._collections import HTTPHeaderDict from .connection import BaseSSLError, HTTPConnection, HTTPException from .exceptions import ( BodyNotHttplibCompatible, DecodeError, HTTPError, IncompleteRead, InvalidChunkLength, InvalidHeader, ProtocolError, ReadTimeoutError, ResponseNotChunked, SSLError, ) from .util.response import is_fp_closed, is_response_to_head from .util.retry import Retry if typing.TYPE_CHECKING: from typing_extensions import Literal from .connectionpool import HTTPConnectionPool log = logging.getLogger(__name__) class ContentDecoder: def decompress(self, data: bytes) -> bytes: raise NotImplementedError() def flush(self) -> bytes: raise NotImplementedError() class DeflateDecoder(ContentDecoder): def __init__(self) -> None: self._first_try = True self._data = b"" self._obj = zlib.decompressobj() def decompress(self, data: bytes) -> bytes: if not data: return data if not self._first_try: return self._obj.decompress(data) self._data += data try: decompressed = self._obj.decompress(data) if decompressed: self._first_try = False self._data = None # type: ignore[assignment] return decompressed except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None # type: ignore[assignment] def flush(self) -> bytes: return self._obj.flush() class GzipDecoderState: FIRST_MEMBER = 0 OTHER_MEMBERS = 1 SWALLOW_DATA = 2 class GzipDecoder(ContentDecoder): def __init__(self) -> None: self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) self._state = GzipDecoderState.FIRST_MEMBER def decompress(self, data: bytes) -> bytes: ret = bytearray() if self._state == GzipDecoderState.SWALLOW_DATA or not data: return bytes(ret) while True: try: ret += self._obj.decompress(data) except zlib.error: previous_state = self._state # Ignore data after the first error self._state = GzipDecoderState.SWALLOW_DATA if previous_state == GzipDecoderState.OTHER_MEMBERS: # Allow trailing garbage acceptable in other gzip clients return bytes(ret) raise data = self._obj.unused_data if not data: return bytes(ret) self._state = GzipDecoderState.OTHER_MEMBERS self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) def flush(self) -> bytes: return self._obj.flush() if brotli is not None: class BrotliDecoder(ContentDecoder): # Supports both 'brotlipy' and 'Brotli' packages # since they share an import name. 
The top branches # are for 'brotlipy' and bottom branches for 'Brotli' def __init__(self) -> None: self._obj = brotli.Decompressor() if hasattr(self._obj, "decompress"): setattr(self, "decompress", self._obj.decompress) else: setattr(self, "decompress", self._obj.process) def flush(self) -> bytes: if hasattr(self._obj, "flush"): return self._obj.flush() # type: ignore[no-any-return] return b"" if zstd is not None: class ZstdDecoder(ContentDecoder): def __init__(self) -> None: self._obj = zstd.ZstdDecompressor().decompressobj() def decompress(self, data: bytes) -> bytes: if not data: return b"" data_parts = [self._obj.decompress(data)] while self._obj.eof and self._obj.unused_data: unused_data = self._obj.unused_data self._obj = zstd.ZstdDecompressor().decompressobj() data_parts.append(self._obj.decompress(unused_data)) return b"".join(data_parts) def flush(self) -> bytes: ret = self._obj.flush() # note: this is a no-op if not self._obj.eof: raise DecodeError("Zstandard data is incomplete") return ret # type: ignore[no-any-return] class MultiDecoder(ContentDecoder): """ From RFC7231: If one or more encodings have been applied to a representation, the sender that applied the encodings MUST generate a Content-Encoding header field that lists the content codings in the order in which they were applied. """ def __init__(self, modes: str) -> None: self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] def flush(self) -> bytes: return self._decoders[0].flush() def decompress(self, data: bytes) -> bytes: for d in reversed(self._decoders): data = d.decompress(data) return data def _get_decoder(mode: str) -> ContentDecoder: if "," in mode: return MultiDecoder(mode) if mode == "gzip": return GzipDecoder() if brotli is not None and mode == "br": return BrotliDecoder() if zstd is not None and mode == "zstd": return ZstdDecoder() return DeflateDecoder() class BytesQueueBuffer: """Memory-efficient bytes buffer To return decoded data in read() and still follow the BufferedIOBase API, we need a buffer to always return the correct amount of bytes. This buffer should be filled using calls to put() Our maximum memory usage is determined by the sum of the size of: * self.buffer, which contains the full data * the largest chunk that we will copy in get() The worst case scenario is a single chunk, in which case we'll make a full copy of the data inside get(). """ def __init__(self) -> None: self.buffer: typing.Deque[bytes] = collections.deque() self._size: int = 0 def __len__(self) -> int: return self._size def put(self, data: bytes) -> None: self.buffer.append(data) self._size += len(data) def get(self, n: int) -> bytes: if n == 0: return b"" elif not self.buffer: raise RuntimeError("buffer is empty") elif n < 0: raise ValueError("n should be > 0") fetched = 0 ret = io.BytesIO() while fetched < n: remaining = n - fetched chunk = self.buffer.popleft() chunk_length = len(chunk) if remaining < chunk_length: left_chunk, right_chunk = chunk[:remaining], chunk[remaining:] ret.write(left_chunk) self.buffer.appendleft(right_chunk) self._size -= remaining break else: ret.write(chunk) self._size -= chunk_length fetched += chunk_length if not self.buffer: break return ret.getvalue() class BaseHTTPResponse(io.IOBase): CONTENT_DECODERS = ["gzip", "deflate"] if brotli is not None: CONTENT_DECODERS += ["br"] if zstd is not None: CONTENT_DECODERS += ["zstd"] REDIRECT_STATUSES = [301, 302, 303, 307, 308] DECODER_ERROR_CLASSES: tuple[type[Exception], ...] 
= (IOError, zlib.error) if brotli is not None: DECODER_ERROR_CLASSES += (brotli.error,) if zstd is not None: DECODER_ERROR_CLASSES += (zstd.ZstdError,) def __init__( self, *, headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, status: int, version: int, reason: str | None, decode_content: bool, request_url: str | None, retries: Retry | None = None, ) -> None: if isinstance(headers, HTTPHeaderDict): self.headers = headers else: self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type] self.status = status self.version = version self.reason = reason self.decode_content = decode_content self._has_decoded_content = False self._request_url: str | None = request_url self.retries = retries self.chunked = False tr_enc = self.headers.get("transfer-encoding", "").lower() # Don't incur the penalty of creating a list and then discarding it encodings = (enc.strip() for enc in tr_enc.split(",")) if "chunked" in encodings: self.chunked = True self._decoder: ContentDecoder | None = None def get_redirect_location(self) -> str | None | Literal[False]: """ Should we redirect and where to? :returns: Truthy redirect location string if we got a redirect status code and valid location. ``None`` if redirect status and no location. ``False`` if not a redirect status code. """ if self.status in self.REDIRECT_STATUSES: return self.headers.get("location") return False @property def data(self) -> bytes: raise NotImplementedError() def json(self) -> typing.Any: """ Parses the body of the HTTP response as JSON. To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. Read more :ref:`here `. """ data = self.data.decode("utf-8") return _json.loads(data) @property def url(self) -> str | None: raise NotImplementedError() @url.setter def url(self, url: str | None) -> None: raise NotImplementedError() @property def connection(self) -> HTTPConnection | None: raise NotImplementedError() @property def retries(self) -> Retry | None: return self._retries @retries.setter def retries(self, retries: Retry | None) -> None: # Override the request_url if retries has a redirect location. if retries is not None and retries.history: self.url = retries.history[-1].redirect_location self._retries = retries def stream( self, amt: int | None = 2**16, decode_content: bool | None = None ) -> typing.Iterator[bytes]: raise NotImplementedError() def read( self, amt: int | None = None, decode_content: bool | None = None, cache_content: bool = False, ) -> bytes: raise NotImplementedError() def read_chunked( self, amt: int | None = None, decode_content: bool | None = None, ) -> typing.Iterator[bytes]: raise NotImplementedError() def release_conn(self) -> None: raise NotImplementedError() def drain_conn(self) -> None: raise NotImplementedError() def close(self) -> None: raise NotImplementedError() def _init_decoder(self) -> None: """ Set-up the _decoder attribute if necessary. 
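For example (illustrative): ``Content-Encoding: gzip`` yields a
:class:`GzipDecoder`, while a chained value such as ``gzip, br``
yields a :class:`MultiDecoder` that unwinds the codings in reverse
order of application.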
""" # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 content_encoding = self.headers.get("content-encoding", "").lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) elif "," in content_encoding: encodings = [ e.strip() for e in content_encoding.split(",") if e.strip() in self.CONTENT_DECODERS ] if encodings: self._decoder = _get_decoder(content_encoding) def _decode( self, data: bytes, decode_content: bool | None, flush_decoder: bool ) -> bytes: """ Decode the data passed in and potentially flush the decoder. """ if not decode_content: if self._has_decoded_content: raise RuntimeError( "Calling read(decode_content=False) is not supported after " "read(decode_content=True) was called." ) return data try: if self._decoder: data = self._decoder.decompress(data) self._has_decoded_content = True except self.DECODER_ERROR_CLASSES as e: content_encoding = self.headers.get("content-encoding", "").lower() raise DecodeError( "Received response with content-encoding: %s, but " "failed to decode it." % content_encoding, e, ) from e if flush_decoder: data += self._flush_decoder() return data def _flush_decoder(self) -> bytes: """ Flushes the decoder. Should only be called if the decoder is actually being used. """ if self._decoder: return self._decoder.decompress(b"") + self._decoder.flush() return b"" # Compatibility methods for `io` module def readinto(self, b: bytearray) -> int: temp = self.read(len(b)) if len(temp) == 0: return 0 else: b[: len(temp)] = temp return len(temp) # Compatibility methods for http.client.HTTPResponse def getheaders(self) -> HTTPHeaderDict: warnings.warn( "HTTPResponse.getheaders() is deprecated and will be removed " "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.", category=DeprecationWarning, stacklevel=2, ) return self.headers def getheader(self, name: str, default: str | None = None) -> str | None: warnings.warn( "HTTPResponse.getheader() is deprecated and will be removed " "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).", category=DeprecationWarning, stacklevel=2, ) return self.headers.get(name, default) # Compatibility method for http.cookiejar def info(self) -> HTTPHeaderDict: return self.headers def geturl(self) -> str | None: return self.url class HTTPResponse(BaseHTTPResponse): """ HTTP Response container. Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. This class is also compatible with the Python standard library's :mod:`io` module, and can hence be treated as a readable object in the context of that framework. Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param original_response: When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` object, it's convenient to include the original for debug purposes. It's otherwise unused. :param retries: The retries contains the last :class:`~urllib3.util.retry.Retry` that was used during the request. :param enforce_content_length: Enforce content length checking. Body returned by server must match value of Content-Length header, if present. Otherwise, raise error. 
""" def __init__( self, body: _TYPE_BODY = "", headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, status: int = 0, version: int = 0, reason: str | None = None, preload_content: bool = True, decode_content: bool = True, original_response: _HttplibHTTPResponse | None = None, pool: HTTPConnectionPool | None = None, connection: HTTPConnection | None = None, msg: _HttplibHTTPMessage | None = None, retries: Retry | None = None, enforce_content_length: bool = True, request_method: str | None = None, request_url: str | None = None, auto_close: bool = True, ) -> None: super().__init__( headers=headers, status=status, version=version, reason=reason, decode_content=decode_content, request_url=request_url, retries=retries, ) self.enforce_content_length = enforce_content_length self.auto_close = auto_close self._body = None self._fp: _HttplibHTTPResponse | None = None self._original_response = original_response self._fp_bytes_read = 0 self.msg = msg if body and isinstance(body, (str, bytes)): self._body = body self._pool = pool self._connection = connection if hasattr(body, "read"): self._fp = body # type: ignore[assignment] # Are we using the chunked-style of transfer encoding? self.chunk_left: int | None = None # Determine length of response self.length_remaining = self._init_length(request_method) # Used to return the correct amount of bytes for partial read()s self._decoded_buffer = BytesQueueBuffer() # If requested, preload the body. if preload_content and not self._body: self._body = self.read(decode_content=decode_content) def release_conn(self) -> None: if not self._pool or not self._connection: return None self._pool._put_conn(self._connection) self._connection = None def drain_conn(self) -> None: """ Read and discard any remaining HTTP response data in the response connection. Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. """ try: self.read() except (HTTPError, OSError, BaseSSLError, HTTPException): pass @property def data(self) -> bytes: # For backwards-compat with earlier urllib3 0.4 and earlier. if self._body: return self._body # type: ignore[return-value] if self._fp: return self.read(cache_content=True) return None # type: ignore[return-value] @property def connection(self) -> HTTPConnection | None: return self._connection def isclosed(self) -> bool: return is_fp_closed(self._fp) def tell(self) -> int: """ Obtain the number of bytes pulled over the wire so far. May differ from the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` if bytes are encoded on the wire (e.g, compressed). """ return self._fp_bytes_read def _init_length(self, request_method: str | None) -> int | None: """ Set initial length value for Response content if available. """ length: int | None content_length: str | None = self.headers.get("content-length") if content_length is not None: if self.chunked: # This Response will fail with an IncompleteRead if it can't be # received as chunked. This method falls back to attempt reading # the response before raising an exception. log.warning( "Received response with both Content-Length and " "Transfer-Encoding set. This is expressly forbidden " "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " "attempting to process response as Transfer-Encoding: " "chunked." ) return None try: # RFC 7230 section 3.3.2 specifies multiple content lengths can # be sent in a single Content-Length header # (e.g. Content-Length: 42, 42). 
    def _init_length(self, request_method: str | None) -> int | None:
        """
        Set initial length value for Response content if available.
        """
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        else:  # if content_length is None
            length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
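
    # Illustrative (comment only): how the duplicate Content-Length check
    # above plays out for a few header values.
    #
    #     "42"      -> {42}       -> length = 42
    #     "42, 42"  -> {42}       -> length = 42 (duplicates collapse in the set)
    #     "42, 43"  -> {42, 43}   -> InvalidHeader raised
    #     "forty"   -> ValueError -> length = None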
    @contextmanager
    def _error_catcher(self) -> typing.Generator[None, None, None]:
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout as e:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):
                    # SSL errors related to framing/MAC get wrapped and reraised here
                    raise SSLError(e) from e

                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except (HTTPException, OSError) as e:
                # This includes IncompleteRead.
                raise ProtocolError(f"Connection broken: {e!r}", e) from e

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def _fp_read(self, amt: int | None = None) -> bytes:
        """
        Read a response with the thought that reading the number of bytes
        larger than can fit in a 32-bit int at a time via SSL in some
        known cases leads to an overflow error that has to be prevented
        if `amt` or `self.length_remaining` indicate that a problem may
        happen.

        The known cases:
          * 3.8 <= CPython < 3.9.7 because of a bug
            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
          * urllib3 injected with pyOpenSSL-backed SSL-support.
          * CPython < 3.10 only when `amt` does not fit 32-bit int.
        """
        assert self._fp
        c_int_max = 2**31 - 1
        if (
            (
                (amt and amt > c_int_max)
                or (self.length_remaining and self.length_remaining > c_int_max)
            )
            and not util.IS_SECURETRANSPORT
            and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
        ):
            buffer = io.BytesIO()
            # Besides `max_chunk_amt` being a maximum chunk size, it
            # affects memory overhead of reading a response by this
            # method in CPython.
            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
            # chunk size that does not lead to an overflow error, but
            # 256 MiB is a compromise.
            max_chunk_amt = 2**28
            while amt is None or amt != 0:
                if amt is not None:
                    chunk_amt = min(amt, max_chunk_amt)
                    amt -= chunk_amt
                else:
                    chunk_amt = max_chunk_amt
                data = self._fp.read(chunk_amt)
                if not data:
                    break
                buffer.write(data)
                del data  # to reduce peak memory usage by `max_chunk_amt`.
            return buffer.getvalue()
        else:
            # StringIO doesn't like amt=None
            return self._fp.read(amt) if amt is not None else self._fp.read()

    def _raw_read(
        self,
        amt: int | None = None,
    ) -> bytes:
        """
        Reads `amt` of bytes from the socket.
        """
        if self._fp is None:
            return None  # type: ignore[return-value]

        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            data = self._fp_read(amt) if not fp_closed else b""
            if amt is not None and amt != 0 and not data:
                # Platform-specific: Buggy versions of Python.
                # Close the connection when no data is returned
                #
                # This is redundant to what httplib/http.client _should_
                # already do.  However, versions of python released before
                # December 15, 2012 (http://bugs.python.org/issue16298) do
                # not properly close the connection in all cases. There is
                # no harm in redundantly calling close.
                self._fp.close()
                if (
                    self.enforce_content_length
                    and self.length_remaining is not None
                    and self.length_remaining != 0
                ):
                    # This is an edge case that httplib failed to cover due
                    # to concerns of backward compatibility. We're
                    # addressing it here to make sure IncompleteRead is
                    # raised during streaming, so all calls with incorrect
                    # Content-Length are caught.
                    raise IncompleteRead(self._fp_bytes_read, self.length_remaining)

        if data:
            self._fp_bytes_read += len(data)
            if self.length_remaining is not None:
                self.length_remaining -= len(data)

        return data

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        """
        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object.
            This is useful if you want the ``.data`` property to continue
            working after having ``.read()`` the file object. (Overridden if
            ``amt`` is set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if amt is not None:
            cache_content = False

            if len(self._decoded_buffer) >= amt:
                return self._decoded_buffer.get(amt)

        data = self._raw_read(amt)

        flush_decoder = False
        if amt is None:
            flush_decoder = True
        elif amt != 0 and not data:
            flush_decoder = True

        if not data and len(self._decoded_buffer) == 0:
            return data

        if amt is None:
            data = self._decode(data, decode_content, flush_decoder)
            if cache_content:
                self._body = data
        else:
            # do not waste memory on buffer when not decoding
            if not decode_content:
                if self._has_decoded_content:
                    raise RuntimeError(
                        "Calling read(decode_content=False) is not supported after "
                        "read(decode_content=True) was called."
                    )
                return data

            decoded_data = self._decode(data, decode_content, flush_decoder)
            self._decoded_buffer.put(decoded_data)

            while len(self._decoded_buffer) < amt and data:
                # TODO make sure to initially read enough data to get past the headers
                # For example, the GZ file header takes 10 bytes, we don't want to read
                # it one byte at a time
                data = self._raw_read(amt)
                decoded_data = self._decode(data, decode_content, flush_decoder)
                self._decoded_buffer.put(decoded_data)
            data = self._decoded_buffer.get(amt)

        return data
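
    # Illustrative usage (comment only, not executed): partial reads are
    # served from the decoded buffer, so each call returns exactly ``amt``
    # bytes until the body is exhausted:
    #
    #     resp = pool.request("GET", "/large", preload_content=False)
    #     first = resp.read(1024)  # exactly 1024 bytes (or fewer at EOF)
    #     rest = resp.read()       # remainder of the body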
    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Generator[bytes, None, None]:
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            this much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the
            empty string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked and self.supports_chunked_reads():
            yield from self.read_chunked(amt, decode_content=decode_content)
        else:
            while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0:
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data

    # Overrides from io.IOBase
    def readable(self) -> bool:
        return True

    def close(self) -> None:
        if not self.closed and self._fp:
            self._fp.close()

        if self._connection:
            self._connection.close()

        if not self.auto_close:
            io.IOBase.close(self)

    @property
    def closed(self) -> bool:
        if not self.auto_close:
            return io.IOBase.closed.__get__(self)  # type: ignore[no-any-return]
        elif self._fp is None:
            return True
        elif hasattr(self._fp, "isclosed"):
            return self._fp.isclosed()
        elif hasattr(self._fp, "closed"):
            return self._fp.closed
        else:
            return True

    def fileno(self) -> int:
        if self._fp is None:
            raise OSError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise OSError(
                "The file-like object this HTTPResponse is wrapped "
                "around has no file descriptor"
            )

    def flush(self) -> None:
        if (
            self._fp is not None
            and hasattr(self._fp, "flush")
            and not getattr(self._fp, "closed", False)
        ):
            return self._fp.flush()

    def supports_chunked_reads(self) -> bool:
        """
        Checks if the underlying file-like object looks like a
        :class:`http.client.HTTPResponse` object. We do this by testing for
        the fp attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        """
        return hasattr(self._fp, "fp")
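
    # Illustrative usage (comment only, not executed): streaming a body in
    # bounded chunks instead of loading it all at once. ``handle`` is a
    # hypothetical consumer, not a urllib3 API:
    #
    #     resp = pool.request("GET", "/large", preload_content=False)
    #     for chunk in resp.stream(2**16):
    #         handle(chunk)
    #     resp.release_conn()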
    def _update_chunk_length(self) -> None:
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return None
        line = self._fp.fp.readline()  # type: ignore[union-attr]
        line = line.split(b";", 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise InvalidChunkLength(self, line) from None

    def _handle_chunk(self, amt: int | None) -> bytes:
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)  # type: ignore[union-attr]
            returned_chunk = chunk
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif self.chunk_left is not None and amt < self.chunk_left:
            value = self._fp._safe_read(amt)  # type: ignore[union-attr]
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)  # type: ignore[union-attr]
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)  # type: ignore[union-attr]
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk  # type: ignore[no-any-return]

    def read_chunked(
        self, amt: int | None = None, decode_content: bool | None = None
    ) -> typing.Generator[bytes, None, None]:
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing."
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be http.client.HTTPResponse like. "
                "It should have an fp attribute which returns raw chunks."
            )

        with self._error_catcher():
            # Don't bother reading the body of a HEAD request.
            if self._original_response and is_response_to_head(self._original_response):
                self._original_response.close()
                return None

            # If a response is already read and closed
            # then return immediately.
            if self._fp.fp is None:  # type: ignore[union-attr]
                return None

            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(
                    chunk, decode_content=decode_content, flush_decoder=False
                )
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # let's defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while self._fp is not None:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b"\r\n":
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()

    @property
    def url(self) -> str | None:
        """
        Returns the URL that was the source of this response.
        If the request that generated this response redirected, this method
        will return the final redirect location.
        """
        return self._request_url

    @url.setter
    def url(self, url: str) -> None:
        self._request_url = url

    def __iter__(self) -> typing.Iterator[bytes]:
        buffer: list[bytes] = []
        for chunk in self.stream(decode_content=True):
            if b"\n" in chunk:
                chunks = chunk.split(b"\n")
                yield b"".join(buffer) + chunks[0] + b"\n"
                for x in chunks[1:-1]:
                    yield x + b"\n"
                if chunks[-1]:
                    buffer = [chunks[-1]]
                else:
                    buffer = []
            else:
                buffer.append(chunk)
        if buffer:
            yield b"".join(buffer)
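
# An end-to-end usage sketch (illustrative only, never invoked by urllib3;
# "https://example.com" is a stand-in host). It ties together the pieces
# defined above: deferred body loading, header access, and line iteration.
def _http_response_example() -> None:
    import urllib3

    with urllib3.PoolManager() as http:
        resp = http.request("GET", "https://example.com/", preload_content=False)
        print(resp.status, resp.headers.get("content-type"))
        for line in resp:  # __iter__ yields newline-terminated chunks
            print(line[:60])
        resp.release_conn()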
# ==== urllib3/util/__init__.py ====
# For backwards compatibility, provide imports that used to be here.
from __future__ import annotations

from .connection import is_connection_dropped
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
    ALPN_PROTOCOLS,
    IS_PYOPENSSL,
    IS_SECURETRANSPORT,
    SSLContext,
    assert_fingerprint,
    create_urllib3_context,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .timeout import Timeout
from .url import Url, parse_url
from .wait import wait_for_read, wait_for_write

__all__ = (
    "IS_PYOPENSSL",
    "IS_SECURETRANSPORT",
    "SSLContext",
    "ALPN_PROTOCOLS",
    "Retry",
    "Timeout",
    "Url",
    "assert_fingerprint",
    "create_urllib3_context",
    "is_connection_dropped",
    "is_fp_closed",
    "parse_url",
    "make_headers",
    "resolve_cert_reqs",
    "resolve_ssl_version",
    "ssl_wrap_socket",
    "wait_for_read",
    "wait_for_write",
    "SKIP_HEADER",
    "SKIPPABLE_HEADERS",
)
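
# A small sketch (not part of urllib3, never invoked) of the helpers
# re-exported above; the values shown in comments are representative of the
# documented behaviour, not guaranteed verbatim.
def _util_example() -> None:
    headers = make_headers(keep_alive=True, user_agent="demo/1.0")
    # -> {'connection': 'keep-alive', 'user-agent': 'demo/1.0'}
    url = parse_url("https://example.com:8042/over/there?name=ferret")
    # -> Url(scheme='https', host='example.com', port=8042, ...)
    retry = Retry(total=3, backoff_factor=0.5)
    timeout = Timeout(connect=2.0, read=7.0)
    print(headers, url.host, retry.total, timeout.connect_timeout)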
# (Binary util/__pycache__/*.cpython-311.pyc archive entries omitted:
#  __init__, connection, proxy, request, response, retry, ssl_,
#  ssl_match_hostname, ssltransport, timeout. These were compiled-bytecode
#  duplicates of the corresponding .py sources and not readable as text.)
It must be an int, float or None.zTimeout value z was z', but it must be an int, float or None.rzAttempted to set z timeout to zC, but the timeout cannot be set to a value less than or equal to 0.)r isinstancebool ValueErrorfloat TypeError)clsr.r/s rrzTimeout._validate_timeouts) =E%555L eT " " ,   %LLLL:&   *)-uuu6   zz j.2TT555:    *)-uuu6    sA)A,0B "B/c$t||S)aCreate a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, :attr:`urllib3.util.Timeout.DEFAULT_TIMEOUT`, or None :return: Timeout object :rtype: :class:`Timeout` )rr)r)r6r*s r from_floatzTimeout.from_floatsGW5555rcDt|j|j|jS)aCreate a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` )rrr)rrr rr(s rclonez Timeout.clonest}4:TZPPPPrr4cl|jtdtj|_|jS)zStart the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. Nz'Timeout timer has already been started.r!rtime monotonicr(s r start_connectzTimeout.start_connects5   *#$MNN N"n..""rcd|jtdtj|jz S)aGets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time in seconds. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. Nz:Can't get connect duration for timer that has not started.r<r(s rget_connect_durationzTimeout.get_connect_durations:   &#L ~$"555rc|j|jS|j|jtur|jSt|j|jS)a!Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None )rrrminr(s rconnect_timeoutzTimeout.connect_timeoutsE : = = DM5E$E$E: 4=$*---rc|jn|jtur`|jY|jturK|j|jSt dt |j|z |jS|j8|jtur*t d|j|z S||jS)a{Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. Nr)rrr r!maxrCrAr-r(s r read_timeoutzTimeout.read_timeouts$ J " "222 & "222"*z!q#dj4+D+D+F+FF SSTT T Z # :J(J(Jq$*t'@'@'B'BBCC C// ;; ;r)rrrrrrrr)rr$)r*rrr+)r.rr/r$rr)r*rrr)rr)rr4)rr)rr+)r r r__doc__rr__annotations__r#r)__str__ staticmethodr- classmethodrr8r:r?rApropertyrDrGrrrrrs\RRj&6O5555 $!1. 
1 1 1 1 1nnnnGOOO\O'''['R 6 6 6[ 6 Q Q Q Q # # # # 6 6 6 6...X."<<<X<<rWs""""""" $$$$$$****** (''''''D *7)<<<<< UM-A BC ~<~<~<~<~<~<~<~<~<~Z2dMd?Z2dNdBZ3dOdDZ4dPdFZ5dS)Q) annotationsN)LocationParseError)to_str)httphttpsNz%[a-fA-F0-9]{2}z^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)zS^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?(?://([^\\/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?$z(?:[0-9]{1,3}\.){3}[0-9]{1,3}z[0-9A-Fa-f]{1,4}z(?:{hex}:{hex}|{ipv4}))hexipv4)r ls32) z(?:%(hex)s:){6}%(ls32)sz::(?:%(hex)s:){5}%(ls32)sz%(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)sz2(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)sz6(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)sz/(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)sz'(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)sz&(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)sz(?:(?:%(hex)s:){0,6}%(hex)s)?::zCABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~z(?:|c"g|] }|tz S)_subs).0xs G/opt/cloudlinux/venv/lib64/python3.11/site-packages/urllib3/util/url.py r4s===Aa%i===)z (?:%25|%)(?:[z]|%[a-fA-F0-9]{2})+z\[z)?\]z!(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*z!^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$^$(z)\]$z^(z)(?::0*?(|0|[1-9][0-9]{0,4}))?$zBABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~z !$&'()*+,;=:@/?ceZdZdZ ddfd ZeddZeddZeddZeddZ eddZ ddZ xZ S)Urlz Data structure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. Nscheme str | Noneauthhostport int | Nonepathqueryfragmentc |r|dsd|z}||}t||||||||S)Nr) startswithlowersuper__new__) clsr!r#r$r%r'r(r) __class__s rr.z Url.__new__ds`  ,, :D  \\^^FwwsFD$dE8TTTrreturnc|jS)z@For backwards-compatibility with urlparse. We're nice like that.)r$selfs rhostnamez Url.hostnamets yrstrc@|jpd}|j |d|jzz }|S)z)Absolute path including the query string.rNr)r'r()r4uris r request_urizUrl.request_uriys.i3 : ! 3# #C rc8|j}|j}|||S|d|S)z Authority component as defined in RFC 3986 3.2. This includes userinfo (auth), host and port. i.e. userinfo@host:port Nr)r#netloc)r4userinfor;s r authorityz Url.authoritys59 >X-M)))) )rcR|jdS|jr|jd|jS|jS)z Network location including host and port. If you need the equivalent of urllib.parse's ``netloc``, use the ``authority`` property instead. Nr)r$r%r3s rr;z Url.netlocs; 9 4 9 .i--$)-- -yrc|\}}}}}}}d}|||dzz }|||dzz }|||z }||dt|zz }|||z }||d|zz }||d|zz }|S)a2 Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: .. code-block:: python import urllib3 U = urllib3.util.parse_url("https://google.com/mail/") print(U.url) # "https://google.com/mail/" print( urllib3.util.Url("https", "username:password", "host.com", 80, "/path", "query", "fragment" ).url ) # "https://username:password@host.com:80/path?query#fragment" Nz://rrr#)r6) r4r!r#r$r%r'r(r)urls rrBzUrl.urls6;?7dD$x   6E> !C   4#: C   4KC   3T? 
"C   4KC   3; C   3> !C rc|jSN)rBr3s r__str__z Url.__str__s xr)NNNNNNN)r!r"r#r"r$r"r%r&r'r"r(r"r)r")r1r")r1r6) __name__ __module__ __qualname____doc__r.propertyr5r9r=r;rBrE __classcell__)r0s@rr r Ps" #UUUUUUU XX * * *X *   X ---X-^rr r!r#r$r%r'r(r) componentr6 allowed_charstyping.Container[str]r1cdSrDrrLrMs r_encode_invalid_charsrQ CrNonecdSrDrrPs rrQrQrRrr"c||St|}td|\}}|dd}||dk}t }t dt|D]}|||dz}t|}|r|dks|dkr| |vr||z }H| dt|d d d  z| S) zfPercent-encodes a URI component without reapplying onto an already percent-encoded component. NcP|dS)Nr)groupupper)matchs rz'_encode_invalid_chars..sekk!nn**,,rzutf-8 surrogatepass%rrr)r _PERCENT_REsubnencodecount bytearrayrangelenorddecodeextendr zfillrX) rLrMpercent_encodings uri_bytesis_percent_encodedencoded_componentibytebyte_ords rrQrQsT y!!I $/#3#3,,i$$ I   /::I*iood.C.CC!  1c)nn % % W WQU#t99  44<< sNNt{{}} ==  %    Xqrr):)A)A)C)C)I)I!)L)L)R)R)T)T!UVVVV  # # % %%rc|d}g}|D];}|dkr |dkr||%|r|<|dr |r|dr|dd|dr|dd|S)Nr.z..rr@)z/.z/..)splitappendpopr+insertendswithjoin)r'segmentsoutputsegments r_remove_path_dot_segmentsr{szz#H F   c>>  d?? MM' " " " "  JJLLL sVvay a }}]## b 88F  rcdSrDrr$r!s r_normalize_hostr~%CrcdSrDrr}s rr~r~*rrc|rB|tvr8t|}|rt|}|r|d\}}|||}|dr|dkr |dd}n |dd}t|t}|d| d|||dS| St|s@td d| dDdS|S) Nrz%25%.c,g|]}t|Sr) _idna_encode)rlabels rrz#_normalize_host..Fs PPPu|E22PPPrrqascii)_NORMALIZABLE_SCHEMES_IPV6_ADDRZ_RErY _ZONE_ID_REsearchspanr+rQ_UNRESERVED_CHARSr,_IPV4_RErrwrr)r$r!is_ipv6rYstartendzone_ids rr~r~/sU  * * *$**400G $**400 (!&AJE3"59oG))%00.W5E5E")!""+")!""+3G=NOOG"6E6l0022JJWJd344jJJJ::<<'^^D)) IIPP 3PPPQQ KrnamebytescZ|sq ddl}n#t$rtddwxYw ||ddS#|j$rtd|ddwxYw|dS)Nrz-Unable to parse URL without the 'idna' moduleT)strict std3_ruleszName 'z' is not a valid IDNA labelr)isasciiidna ImportErrorrr`r, IDNAError)rrs rrrLs <<>>   KKKK   $?    ;;tzz||DT;JJ J~   $::::   ::<<  w ' ''s6)A$$Btargetct|}|st|d|\}}t |t }|t |t }|d|zz }|S)zPercent-encodes a request target so that there are no invalid characters Pre-condition for this function is that 'target' must start with '/'. If that is the case then _TARGET_RE will always produce a match. z is not a valid request URINr) _TARGET_RErYrgroupsrQ _PATH_CHARS _QUERY_CHARS)rrYr'r(encoded_targets r_encode_targetr_s   V $ $E K F!I!I!IJJJ,,..KD%*4==N %e\::#+% rrBc |stS|}t|sd|z} t|\}}}}}|dup|tv}|r|}|rn|d\}} } |pd}t| \} } |r|rt|t}| dkrd} nd\}} } | .t| } d| cxkrdksnt|nd} t| |} |r&|r$t|}t|t }|r|rt|t"}|r|rt|t$}n)#t&t(f$r}t||d}~wwxYw|s ||d}nd}t||| | |||S) a Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. This parser is RFC 3986 and RFC 6874 compliant. The parser logic and helper functions are based heavily on work done in the ``rfc3986`` module. :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urllib.parse`. Example: .. code-block:: python import urllib3 print( urllib3.util.parse_url('http://google.com/mail/')) # Url(scheme='http', host='google.com', port=None, path='/mail/', ...) print( urllib3.util.parse_url('google.com:80')) # Url(scheme=None, host='google.com', port=80, path=None, ...) print( urllib3.util.parse_url('/foo?bar')) # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) z//Nrr@)NNNri)r!r#r$r%r'r(r))r _SCHEME_REr_URI_RErYrr,r rpartition _HOST_PORT_RErQ_USERINFO_CHARSintrr~r{rr_FRAGMENT_CHARS ValueErrorAttributeError)rB source_urlr!r=r'r(r) normalize_urir#_ host_portr$r%port_intes r parse_urlrqsd8 uu J   S ! 
PKQZ&Qnnutil/connection.pynu[
from __future__ import annotations

import socket
import typing

from ..exceptions import LocationParseError
from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT

_TYPE_SOCKET_OPTIONS = typing.Sequence[typing.Tuple[int, int, typing.Union[int, bytes]]]

if typing.TYPE_CHECKING:
    from .._base_connection import BaseHTTPConnection


def is_connection_dropped(conn: BaseHTTPConnection) -> bool:  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.
    :param conn: :class:`urllib3.connection.HTTPConnection` object.
    """
    return not conn.is_connected


# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
def create_connection(
    address: tuple[str, int],
    timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
    source_address: tuple[str, int] | None = None,
    socket_options: _TYPE_SOCKET_OPTIONS | None = None,
) -> socket.socket:
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`socket.getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    A host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    if host.startswith("["):
        host = host.strip("[]")
    err = None

    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
    # The original create_connection function always returns all records.
    family = allowed_gai_family()

    try:
        host.encode("idna")
    except UnicodeError:
        raise LocationParseError(f"'{host}', label empty or too long") from None

    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            _set_socket_options(sock, socket_options)

            if timeout is not _DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            # Break explicitly a reference cycle
            err = None
            return sock

        except OSError as _:
            err = _
            if sock is not None:
                sock.close()

    if err is not None:
        try:
            raise err
        finally:
            # Break explicitly a reference cycle
            err = None
    else:
        raise OSError("getaddrinfo returns an empty list")


def _set_socket_options(
    sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None
) -> None:
    if options is None:
        return

    for opt in options:
        sock.setsockopt(*opt)


def allowed_gai_family() -> socket.AddressFamily:
    """This function is designed to work in the context of
    getaddrinfo, where family=socket.AF_UNSPEC is the default and
    will perform a DNS search for both IPv6 and IPv4 records."""

    family = socket.AF_INET
    if HAS_IPV6:
        family = socket.AF_UNSPEC
    return family


def _has_ipv6(host: str) -> bool:
    """Returns True if the system can bind an IPv6 address."""
    sock = None
    has_ipv6 = False

    if socket.has_ipv6:
        # has_ipv6 returns true if cPython was compiled with IPv6 support.
        # It does not tell us if the system has IPv6 support enabled. To
        # determine that we must bind to an IPv6 address.
        # https://github.com/urllib3/urllib3/pull/611
        # https://bugs.python.org/issue658327
        try:
            sock = socket.socket(socket.AF_INET6)
            sock.bind((host, 0))
            has_ipv6 = True
        except Exception:
            pass

    if sock:
        sock.close()
    return has_ipv6


HAS_IPV6 = _has_ipv6("::1")
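# ---------------------------------------------------------------------------
# Editor's note: a minimal usage sketch for the module above, not part of the
# vendored sources. The host, port, and timeout are illustrative and running
# the snippet requires network access. The socket_options tuples have the same
# shape that _set_socket_options() unpacks into sock.setsockopt(*opt).
import socket

from urllib3.util.connection import create_connection

sock = create_connection(
    ("example.com", 80),
    timeout=5.0,
    # Disable Nagle's algorithm on the new socket before connecting.
    socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
)
sock.close()
# ---------------------------------------------------------------------------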
PKQZL|| util/proxy.pynu[
from __future__ import annotations

import typing

from .url import Url

if typing.TYPE_CHECKING:
    from ..connection import ProxyConfig


def connection_requires_http_tunnel(
    proxy_url: Url | None = None,
    proxy_config: ProxyConfig | None = None,
    destination_scheme: str | None = None,
) -> bool:
    """
    Returns True if the connection requires an HTTP CONNECT through the proxy.

    :param URL proxy_url:
        URL of the proxy.
    :param ProxyConfig proxy_config:
        Proxy configuration from poolmanager.py
    :param str destination_scheme:
        The scheme of the destination. (i.e. https, http, etc.)
    """
    # If we're not using a proxy, no way to use a tunnel.
    if proxy_url is None:
        return False

    # HTTP destinations never require tunneling, we always forward.
    if destination_scheme == "http":
        return False

    # Support for forwarding with HTTPS proxies and HTTPS destinations.
    if (
        proxy_url.scheme == "https"
        and proxy_config
        and proxy_config.use_forwarding_for_https
    ):
        return False

    # Otherwise always use a tunnel.
    return True
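# ---------------------------------------------------------------------------
# Editor's note: a quick sketch of the tunneling decision above, not part of
# the vendored sources. connection_requires_http_tunnel() is pure logic over
# the parsed proxy URL, so it can be exercised standalone; the proxy address
# is illustrative.
from urllib3.util.proxy import connection_requires_http_tunnel
from urllib3.util.url import parse_url

proxy_url = parse_url("http://proxy.internal:3128")
# Plain-HTTP destinations are always forwarded, never tunneled.
assert connection_requires_http_tunnel(proxy_url, None, "http") is False
# HTTPS destinations through an HTTP proxy require a CONNECT tunnel.
assert connection_requires_http_tunnel(proxy_url, None, "https") is True
# ---------------------------------------------------------------------------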
PKQZ¿util/request.pynu[
from __future__ import annotations

import io
import typing
from base64 import b64encode
from enum import Enum

from ..exceptions import UnrewindableBodyError
from .util import to_bytes

if typing.TYPE_CHECKING:
    from typing_extensions import Final

# Pass as a value within ``headers`` to skip
# emitting some HTTP headers that are added automatically.
# The only headers that are supported are ``Accept-Encoding``,
# ``Host``, and ``User-Agent``.
SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])

ACCEPT_ENCODING = "gzip,deflate"
try:
    try:
        import brotlicffi as _unused_module_brotli  # type: ignore[import] # noqa: F401
    except ImportError:
        import brotli as _unused_module_brotli  # type: ignore[import] # noqa: F401
except ImportError:
    pass
else:
    ACCEPT_ENCODING += ",br"
try:
    import zstandard as _unused_module_zstd  # type: ignore[import] # noqa: F401
except ImportError:
    pass
else:
    ACCEPT_ENCODING += ",zstd"


class _TYPE_FAILEDTELL(Enum):
    token = 0


_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token

_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL]

# When sending a request with these methods we aren't expecting
# a body so don't need to set an explicit 'Content-Length: 0'
# The reason we do this in the negative instead of tracking methods
# which 'should' have a body is because unknown methods should be
# treated as if they were 'POST' which *does* expect a body.
_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"}


def make_headers(
    keep_alive: bool | None = None,
    accept_encoding: bool | list[str] | str | None = None,
    user_agent: str | None = None,
    basic_auth: str | None = None,
    proxy_basic_auth: str | None = None,
    disable_cache: bool | None = None,
) -> dict[str, str]:
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.  If either the ``brotli`` or
        ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example:

    .. code-block:: python

        import urllib3

        print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0"))
        # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        print(urllib3.util.make_headers(accept_encoding=True))
        # {'accept-encoding': 'gzip,deflate'}
    """
    headers: dict[str, str] = {}
    if accept_encoding:
        if isinstance(accept_encoding, str):
            pass
        elif isinstance(accept_encoding, list):
            accept_encoding = ",".join(accept_encoding)
        else:
            accept_encoding = ACCEPT_ENCODING
        headers["accept-encoding"] = accept_encoding

    if user_agent:
        headers["user-agent"] = user_agent

    if keep_alive:
        headers["connection"] = "keep-alive"

    if basic_auth:
        headers[
            "authorization"
        ] = f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}"

    if proxy_basic_auth:
        headers[
            "proxy-authorization"
        ] = f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}"

    if disable_cache:
        headers["cache-control"] = "no-cache"

    return headers


def set_file_position(
    body: typing.Any, pos: _TYPE_BODY_POSITION | None
) -> _TYPE_BODY_POSITION | None:
    """
    If a position is provided, move file to that point.
    Otherwise, we'll attempt to record a position for future use.
    """
    if pos is not None:
        rewind_body(body, pos)
    elif getattr(body, "tell", None) is not None:
        try:
            pos = body.tell()
        except OSError:
            # This differentiates from None, allowing us to catch
            # a failed `tell()` later when trying to rewind the body.
            pos = _FAILEDTELL

    return pos


def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None:
    """
    Attempt to rewind body to a certain position.
    Primarily used for request redirects and retries.

    :param body:
        File-like object that supports seek.

    :param int pos:
        Position to seek to in file.
    """
    body_seek = getattr(body, "seek", None)
    if body_seek is not None and isinstance(body_pos, int):
        try:
            body_seek(body_pos)
        except OSError as e:
            raise UnrewindableBodyError(
                "An error occurred when rewinding request body for redirect/retry."
            ) from e
    elif body_pos is _FAILEDTELL:
        raise UnrewindableBodyError(
            "Unable to record file position for rewinding "
            "request body during a redirect/retry."
        )
    else:
        raise ValueError(
            f"body_pos must be of type integer, instead it was {type(body_pos)}."
        )


class ChunksAndContentLength(typing.NamedTuple):
    chunks: typing.Iterable[bytes] | None
    content_length: int | None


def body_to_chunks(
    body: typing.Any | None, method: str, blocksize: int
) -> ChunksAndContentLength:
    """Takes the HTTP request method, body, and blocksize and
    transforms them into an iterable of chunks to pass to
    socket.sendall() and an optional 'Content-Length' header.

    A 'Content-Length' of 'None' indicates the length of the body
    can't be determined so should use 'Transfer-Encoding: chunked'
    for framing instead.
    """

    chunks: typing.Iterable[bytes] | None
    content_length: int | None

    # No body, we need to make a recommendation on 'Content-Length'
    # based on whether that request method is expected to have
    # a body or not.
    if body is None:
        chunks = None
        if method.upper() not in _METHODS_NOT_EXPECTING_BODY:
            content_length = 0
        else:
            content_length = None

    # Bytes or strings become bytes
    elif isinstance(body, (str, bytes)):
        chunks = (to_bytes(body),)
        content_length = len(chunks[0])

    # File-like object, TODO: use seek() and tell() for length?
    elif hasattr(body, "read"):

        def chunk_readable() -> typing.Iterable[bytes]:
            nonlocal body, blocksize
            encode = isinstance(body, io.TextIOBase)
            while True:
                datablock = body.read(blocksize)
                if not datablock:
                    break
                if encode:
                    datablock = datablock.encode("iso-8859-1")
                yield datablock

        chunks = chunk_readable()
        content_length = None

    # Otherwise we need to start checking via duck-typing.
    else:
        try:
            # Check if the body implements the buffer API.
            mv = memoryview(body)
        except TypeError:
            try:
                # Check if the body is an iterable
                chunks = iter(body)
                content_length = None
            except TypeError:
                raise TypeError(
                    f"'body' must be a bytes-like object, file-like "
                    f"object, or iterable. Instead was {body!r}"
                ) from None
        else:
            # Since it implements the buffer API can be passed directly to socket.sendall()
            chunks = (body,)
            content_length = mv.nbytes

    return ChunksAndContentLength(chunks=chunks, content_length=content_length)
PKQZr s'. . util/response.pynu[
from __future__ import annotations

import http.client as httplib
from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect

from ..exceptions import HeaderParsingError


def is_fp_closed(obj: object) -> bool:
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    """

    try:
        # Check `isclosed()` first, in case Python3 doesn't set `closed`.
        # GH Issue #928
        return obj.isclosed()  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    try:
        # Check via the official file-like-object way.
        return obj.closed  # type: ignore[no-any-return, attr-defined]
    except AttributeError:
        pass

    try:
        # Check if the object is a container for another file-like object that
        # gets released on exhaustion (e.g. HTTPResponse).
        return obj.fp is None  # type: ignore[attr-defined]
    except AttributeError:
        pass

    raise ValueError("Unable to determine whether fp is closed.")


def assert_header_parsing(headers: httplib.HTTPMessage) -> None:
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param http.client.HTTPMessage headers: Headers to verify.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    """

    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError(f"expected httplib.Message, got {type(headers)}.")

    unparsed_data = None

    # get_payload is actually email.message.Message.get_payload;
    # we're only interested in the result if it's not a multipart message
    if not headers.is_multipart():
        payload = headers.get_payload()

        if isinstance(payload, (bytes, str)):
            unparsed_data = payload

    # httplib is assuming a response body is available
    # when parsing headers even when httplib only sends
    # header data to parse_headers(). This results in
    # defects on multipart responses in particular.
    # See: https://github.com/urllib3/urllib3/issues/800

    # So we ignore the following defects:
    # - StartBoundaryNotFoundDefect:
    #     The claimed start boundary was never found.
    # - MultipartInvariantViolationDefect:
    #     A message claimed to be a multipart but no subparts were found.
    defects = [
        defect
        for defect in headers.defects
        if not isinstance(
            defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
        )
    ]

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)


def is_response_to_head(response: httplib.HTTPResponse) -> bool:
    """
    Checks whether the request of a response has been a HEAD-request.

    :param http.client.HTTPResponse response:
        Response to check if the originating request
        used 'HEAD' as a method.
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method_str = response._method  # type: str  # type: ignore[attr-defined]
    return method_str.upper() == "HEAD"
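# ---------------------------------------------------------------------------
# Editor's note: a brief sketch of assert_header_parsing() above, not part of
# the vendored sources. Headers parsed by the stdlib into an
# http.client.HTTPMessage pass cleanly; recorded defects or leftover payload
# data would raise HeaderParsingError instead.
import email.parser
import http.client

from urllib3.util.response import assert_header_parsing

msg = email.parser.Parser(_class=http.client.HTTPMessage).parsestr(
    "Content-Type: text/plain\r\n\r\n"
)
assert_header_parsing(msg)  # no exception: no defects, no unparsed data
# ---------------------------------------------------------------------------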
PKQZ=q GG util/retry.pynu[
from __future__ import annotations

import email
import logging
import random
import re
import time
import typing
from itertools import takewhile
from types import TracebackType

from ..exceptions import (
    ConnectTimeoutError,
    InvalidHeader,
    MaxRetryError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    ResponseError,
)
from .util import reraise

if typing.TYPE_CHECKING:
    from ..connectionpool import ConnectionPool
    from ..response import BaseHTTPResponse

log = logging.getLogger(__name__)


# Data structure for representing the metadata of requests that result in a retry.
class RequestHistory(typing.NamedTuple):
    method: str | None
    url: str | None
    error: Exception | None
    status: int | None
    redirect_location: str | None


class Retry:
    """Retry configuration.

    Each retry attempt will create a new Retry object with updated values, so
    they can be safely reused.

    Retries can be defined as a default for a pool:

    .. code-block:: python

        retries = Retry(connect=5, read=2, redirect=5)
        http = PoolManager(retries=retries)
        response = http.request("GET", "https://example.com/")

    Or per-request (which overrides the default for the pool):

    .. code-block:: python

        response = http.request("GET", "https://example.com/", retries=Retry(10))

    Retries can be disabled by passing ``False``:

    .. code-block:: python

        response = http.request("GET", "https://example.com/", retries=False)

    Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
    retries are disabled, in which case the causing exception will be raised.

    :param int total:
        Total number of retries to allow. Takes precedence over other counts.

        Set to ``None`` to remove this constraint and fall back on other
        counts.

        Set to ``0`` to fail on the first retry.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int connect:
        How many connection-related errors to retry on.

        These are errors raised before the request is sent to the remote server,
        which we assume has not triggered the server to process the request.

        Set to ``0`` to fail on the first retry of this type.

    :param int read:
        How many times to retry on read errors.

        These errors are raised after the request was sent to the server, so the
        request may have side-effects.

        Set to ``0`` to fail on the first retry of this type.

    :param int redirect:
        How many redirects to perform. Limit this to avoid infinite redirect
        loops.

        A redirect is an HTTP response with a status code 301, 302, 303, 307 or
        308.

        Set to ``0`` to fail on the first retry of this type.

        Set to ``False`` to disable and imply ``raise_on_redirect=False``.

    :param int status:
        How many times to retry on bad status codes.

        These are retries made on responses, where status code matches
        ``status_forcelist``.

        Set to ``0`` to fail on the first retry of this type.

    :param int other:
        How many times to retry on other errors.

        Other errors are errors that are not connect, read, redirect or status errors.
        These errors might be raised after the request was sent to the server, so the
        request might have side-effects.

        Set to ``0`` to fail on the first retry of this type.

        If ``total`` is not set, it's a good idea to set this to 0 to account
        for unexpected edge cases and avoid infinite retry loops.

    :param Collection allowed_methods:
        Set of uppercased HTTP method verbs that we should retry on.

        By default, we only retry on methods which are considered to be
        idempotent (multiple requests with the same parameters end with the
        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.

        Set to a ``None`` value to retry on any verb.

    :param Collection status_forcelist:
        A set of integer HTTP status codes that we should force a retry on.
        A retry is initiated if the request method is in ``allowed_methods``
        and the response status code is in ``status_forcelist``.

        By default, this is disabled with ``None``.

    :param float backoff_factor:
        A backoff factor to apply between attempts after the second try
        (most errors are resolved immediately by a second try without a
        delay). urllib3 will sleep for::

            {backoff factor} * (2 ** ({number of previous retries}))

        seconds. If `backoff_jitter` is non-zero, this sleep is extended by::

            random.uniform(0, {backoff jitter})

        seconds. For example, if the backoff_factor is 0.1, then
        :func:`Retry.sleep` will sleep for [0.0s, 0.2s, 0.4s, 0.8s, ...]
        between retries. No backoff will ever be longer than `backoff_max`.

        By default, backoff is disabled (factor set to 0).

    :param bool raise_on_redirect: Whether, if the number of redirects is
        exhausted, to raise a MaxRetryError, or to return a response with a
        response code in the 3xx range.

    :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
        whether we should raise an exception, or return a response,
        if status falls in ``status_forcelist`` range and retries have
        been exhausted.

    :param tuple history: The history of the request encountered during
        each call to :meth:`~Retry.increment`. The list is in the order
        the requests occurred. Each list item is of class :class:`RequestHistory`.

    :param bool respect_retry_after_header:
        Whether to respect Retry-After header on status codes defined as
        :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.

    :param Collection remove_headers_on_redirect:
        Sequence of headers to remove from the request when a response
        indicating a redirect is returned before firing off the redirected
        request.
    """

    #: Default methods to be used for ``allowed_methods``
    DEFAULT_ALLOWED_METHODS = frozenset(
        ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
    )

    #: Default status codes to be used for ``status_forcelist``
    RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])

    #: Default headers to be used for ``remove_headers_on_redirect``
    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])

    #: Default maximum backoff time.
    DEFAULT_BACKOFF_MAX = 120

    # Backward compatibility; assigned outside of the class.
    DEFAULT: typing.ClassVar[Retry]

    def __init__(
        self,
        total: bool | int | None = 10,
        connect: int | None = None,
        read: int | None = None,
        redirect: bool | int | None = None,
        status: int | None = None,
        other: int | None = None,
        allowed_methods: typing.Collection[str] | None = DEFAULT_ALLOWED_METHODS,
        status_forcelist: typing.Collection[int] | None = None,
        backoff_factor: float = 0,
        backoff_max: float = DEFAULT_BACKOFF_MAX,
        raise_on_redirect: bool = True,
        raise_on_status: bool = True,
        history: tuple[RequestHistory, ...] | None = None,
        respect_retry_after_header: bool = True,
        remove_headers_on_redirect: typing.Collection[
            str
        ] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
        backoff_jitter: float = 0.0,
    ) -> None:
        self.total = total
        self.connect = connect
        self.read = read
        self.status = status
        self.other = other

        if redirect is False or total is False:
            redirect = 0
            raise_on_redirect = False

        self.redirect = redirect
        self.status_forcelist = status_forcelist or set()
        self.allowed_methods = allowed_methods
        self.backoff_factor = backoff_factor
        self.backoff_max = backoff_max
        self.raise_on_redirect = raise_on_redirect
        self.raise_on_status = raise_on_status
        self.history = history or ()
        self.respect_retry_after_header = respect_retry_after_header
        self.remove_headers_on_redirect = frozenset(
            h.lower() for h in remove_headers_on_redirect
        )
        self.backoff_jitter = backoff_jitter

    def new(self, **kw: typing.Any) -> Retry:
        params = dict(
            total=self.total,
            connect=self.connect,
            read=self.read,
            redirect=self.redirect,
            status=self.status,
            other=self.other,
            allowed_methods=self.allowed_methods,
            status_forcelist=self.status_forcelist,
            backoff_factor=self.backoff_factor,
            backoff_max=self.backoff_max,
            raise_on_redirect=self.raise_on_redirect,
            raise_on_status=self.raise_on_status,
            history=self.history,
            remove_headers_on_redirect=self.remove_headers_on_redirect,
            respect_retry_after_header=self.respect_retry_after_header,
            backoff_jitter=self.backoff_jitter,
        )

        params.update(kw)
        return type(self)(**params)  # type: ignore[arg-type]

    @classmethod
    def from_int(
        cls,
        retries: Retry | bool | int | None,
        redirect: bool | int | None = True,
        default: Retry | bool | int | None = None,
    ) -> Retry:
        """Backwards-compatibility for the old retries format."""
        if retries is None:
            retries = default if default is not None else cls.DEFAULT

        if isinstance(retries, Retry):
            return retries

        redirect = bool(redirect) and None
        new_retries = cls(retries, redirect=redirect)
        log.debug("Converted retries value: %r -> %r", retries, new_retries)
        return new_retries

    def get_backoff_time(self) -> float:
        """Formula for computing the current backoff

        :rtype: float
        """
        # We want to consider only the last consecutive errors sequence (Ignore redirects).
        consecutive_errors_len = len(
            list(
                takewhile(lambda x: x.redirect_location is None, reversed(self.history))
            )
        )
        if consecutive_errors_len <= 1:
            return 0

        backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
        if self.backoff_jitter != 0.0:
            backoff_value += random.random() * self.backoff_jitter
        return float(max(0, min(self.backoff_max, backoff_value)))

    def parse_retry_after(self, retry_after: str) -> float:
        seconds: float
        # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
        if re.match(r"^\s*[0-9]+\s*$", retry_after):
            seconds = int(retry_after)
        else:
            retry_date_tuple = email.utils.parsedate_tz(retry_after)
            if retry_date_tuple is None:
                raise InvalidHeader(f"Invalid Retry-After header: {retry_after}")

            retry_date = email.utils.mktime_tz(retry_date_tuple)
            seconds = retry_date - time.time()

        seconds = max(seconds, 0)

        return seconds

    def get_retry_after(self, response: BaseHTTPResponse) -> float | None:
        """Get the value of Retry-After in seconds."""

        retry_after = response.headers.get("Retry-After")

        if retry_after is None:
            return None

        return self.parse_retry_after(retry_after)

    def sleep_for_retry(self, response: BaseHTTPResponse) -> bool:
        retry_after = self.get_retry_after(response)
        if retry_after:
            time.sleep(retry_after)
            return True

        return False

    def _sleep_backoff(self) -> None:
        backoff = self.get_backoff_time()
        if backoff <= 0:
            return
        time.sleep(backoff)

    def sleep(self, response: BaseHTTPResponse | None = None) -> None:
        """Sleep between retry attempts.

        This method will respect a server's ``Retry-After`` response header
        and sleep the duration of the time requested. If that is not present, it
        will use an exponential backoff. By default, the backoff factor is 0 and
        this method will return immediately.
        """

        if self.respect_retry_after_header and response:
            slept = self.sleep_for_retry(response)
            if slept:
                return

        self._sleep_backoff()

    def _is_connection_error(self, err: Exception) -> bool:
        """Errors when we're fairly sure that the server did not receive the
        request, so it should be safe to retry.
        """
        if isinstance(err, ProxyError):
            err = err.original_error
        return isinstance(err, ConnectTimeoutError)

    def _is_read_error(self, err: Exception) -> bool:
        """Errors that occur after the request has been started, so we should
        assume that the server began processing it.
        """
        return isinstance(err, (ReadTimeoutError, ProtocolError))

    def _is_method_retryable(self, method: str) -> bool:
        """Checks if a given HTTP method should be retried upon, depending if
        it is included in the allowed_methods
        """
        if self.allowed_methods and method.upper() not in self.allowed_methods:
            return False
        return True

    def is_retry(
        self, method: str, status_code: int, has_retry_after: bool = False
    ) -> bool:
        """Is this method/status code retryable? (Based on allowlists and control
        variables such as the number of total retries to allow, whether to
        respect the Retry-After header, whether this header is present, and
        whether the returned status code is on the list of status codes to
        be retried upon on the presence of the aforementioned header)
        """
        if not self._is_method_retryable(method):
            return False

        if self.status_forcelist and status_code in self.status_forcelist:
            return True

        return bool(
            self.total
            and self.respect_retry_after_header
            and has_retry_after
            and (status_code in self.RETRY_AFTER_STATUS_CODES)
        )

    def is_exhausted(self) -> bool:
        """Are we out of retries?"""
        retry_counts = [
            x
            for x in (
                self.total,
                self.connect,
                self.read,
                self.redirect,
                self.status,
                self.other,
            )
            if x
        ]
        if not retry_counts:
            return False

        return min(retry_counts) < 0

    def increment(
        self,
        method: str | None = None,
        url: str | None = None,
        response: BaseHTTPResponse | None = None,
        error: Exception | None = None,
        _pool: ConnectionPool | None = None,
        _stacktrace: TracebackType | None = None,
    ) -> Retry:
        """Return a new Retry object with incremented retry counters.

        :param response: A response object, or None, if the server did not
            return a response.
        :type response: :class:`~urllib3.response.BaseHTTPResponse`
        :param Exception error: An error encountered during the request, or
            None if the response was received successfully.

        :return: A new ``Retry`` object.
        """
        if self.total is False and error:
            # Disabled, indicate to re-raise the error.
            raise reraise(type(error), error, _stacktrace)

        total = self.total
        if total is not None:
            total -= 1

        connect = self.connect
        read = self.read
        redirect = self.redirect
        status_count = self.status
        other = self.other
        cause = "unknown"
        status = None
        redirect_location = None

        if error and self._is_connection_error(error):
            # Connect retry?
            if connect is False:
                raise reraise(type(error), error, _stacktrace)
            elif connect is not None:
                connect -= 1

        elif error and self._is_read_error(error):
            # Read retry?
            if read is False or method is None or not self._is_method_retryable(method):
                raise reraise(type(error), error, _stacktrace)
            elif read is not None:
                read -= 1

        elif error:
            # Other retry?
            if other is not None:
                other -= 1

        elif response and response.get_redirect_location():
            # Redirect retry?
            if redirect is not None:
                redirect -= 1
            cause = "too many redirects"
            response_redirect_location = response.get_redirect_location()
            if response_redirect_location:
                redirect_location = response_redirect_location
            status = response.status

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the allowed_methods
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                if status_count is not None:
                    status_count -= 1
                cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
                status = response.status

        history = self.history + (
            RequestHistory(method, url, error, status, redirect_location),
        )

        new_retry = self.new(
            total=total,
            connect=connect,
            read=read,
            redirect=redirect,
            status=status_count,
            other=other,
            history=history,
        )

        if new_retry.is_exhausted():
            reason = error or ResponseError(cause)
            raise MaxRetryError(_pool, url, reason) from reason  # type: ignore[arg-type]

        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)

        return new_retry

    def __repr__(self) -> str:
        return (
            f"{type(self).__name__}(total={self.total}, connect={self.connect}, "
            f"read={self.read}, redirect={self.redirect}, status={self.status})"
        )


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
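# ---------------------------------------------------------------------------
# Editor's note: a short sketch of the backoff arithmetic above, not part of
# the vendored sources. Each increment() returns a *new* Retry object; with
# backoff_factor=0.5 the sleep after N consecutive errors is
# 0.5 * 2 ** (N - 1), capped at backoff_max.
from urllib3.util.retry import Retry

retry = Retry(total=3, backoff_factor=0.5)
r1 = retry.increment(method="GET", url="/")
r2 = r1.increment(method="GET", url="/")
assert r1.get_backoff_time() == 0  # a single error retries immediately
assert r2.get_backoff_time() == 1.0  # 0.5 * 2 ** (2 - 1)
assert not r2.is_exhausted()  # total counted down 3 -> 2 -> 1
# ---------------------------------------------------------------------------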
# https://github.com/openssl/openssl/issues/14579 # This was released in OpenSSL 1.1.1l+ (>=0x101010cf) is_openssl_issue_14579_fixed = openssl_version_number >= 0x101010CF return is_openssl and ( is_openssl_issue_14579_fixed or _is_bpo_43522_fixed(implementation_name, version_info, pypy_version_info) ) if typing.TYPE_CHECKING: from ssl import VerifyMode from typing_extensions import Literal, TypedDict from .ssltransport import SSLTransport as SSLTransportType class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): subjectAltName: tuple[tuple[str, str], ...] subject: tuple[tuple[tuple[str, str], ...], ...] serialNumber: str # Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X' _SSL_VERSION_TO_TLS_VERSION: dict[int, int] = {} try: # Do we have ssl at all? import ssl from ssl import ( # type: ignore[assignment] CERT_REQUIRED, HAS_NEVER_CHECK_COMMON_NAME, OP_NO_COMPRESSION, OP_NO_TICKET, OPENSSL_VERSION, OPENSSL_VERSION_NUMBER, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, OP_NO_SSLv2, OP_NO_SSLv3, SSLContext, TLSVersion, ) PROTOCOL_SSLv23 = PROTOCOL_TLS # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython # 3.8.9, 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+ if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable( OPENSSL_VERSION, OPENSSL_VERSION_NUMBER, sys.implementation.name, sys.version_info, sys.pypy_version_info if sys.implementation.name == "pypy" else None, # type: ignore[attr-defined] ): HAS_NEVER_CHECK_COMMON_NAME = False # Need to be careful here in case old TLS versions get # removed in future 'ssl' module implementations. for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"): try: _SSL_VERSION_TO_TLS_VERSION[getattr(ssl, f"PROTOCOL_{attr}")] = getattr( TLSVersion, attr ) except AttributeError: # Defensive: continue from .ssltransport import SSLTransport # type: ignore[assignment] except ImportError: OP_NO_COMPRESSION = 0x20000 # type: ignore[assignment] OP_NO_TICKET = 0x4000 # type: ignore[assignment] OP_NO_SSLv2 = 0x1000000 # type: ignore[assignment] OP_NO_SSLv3 = 0x2000000 # type: ignore[assignment] PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 # type: ignore[assignment] PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment] _TYPE_PEER_CERT_RET = typing.Union["_TYPE_PEER_CERT_RET_DICT", bytes, None] def assert_fingerprint(cert: bytes | None, fingerprint: str) -> None: """ Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """ if cert is None: raise SSLError("No certificate for the peer.") fingerprint = fingerprint.replace(":", "").lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: raise SSLError(f"Fingerprint of invalid length: {fingerprint}") # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) cert_digest = hashfunc(cert).digest() if not hmac.compare_digest(cert_digest, fingerprint_bytes): raise SSLError( f'Fingerprints did not match. Expected "{fingerprint}", got "{cert_digest.hex()}"' ) def resolve_cert_reqs(candidate: None | int | str) -> VerifyMode: """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. 
If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. """ if candidate is None: return CERT_REQUIRED if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, "CERT_" + candidate) return res # type: ignore[no-any-return] return candidate # type: ignore[return-value] def resolve_ssl_version(candidate: None | int | str) -> int: """ like resolve_cert_reqs """ if candidate is None: return PROTOCOL_TLS if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, "PROTOCOL_" + candidate) return typing.cast(int, res) return candidate def create_urllib3_context( ssl_version: int | None = None, cert_reqs: int | None = None, options: int | None = None, ciphers: str | None = None, ssl_minimum_version: int | None = None, ssl_maximum_version: int | None = None, ) -> ssl.SSLContext: """Creates and configures an :class:`ssl.SSLContext` instance for use with urllib3. :param ssl_version: The desired protocol version to use. This will default to PROTOCOL_SSLv23 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. This parameter is deprecated instead use 'ssl_minimum_version'. :param ssl_minimum_version: The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value. :param ssl_maximum_version: The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value. Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the default value. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. :param ciphers: Which cipher suites to allow the server to select. Defaults to either system configured ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers. :returns: Constructed SSLContext object with specified options :rtype: SSLContext """ if SSLContext is None: raise TypeError("Can't create an SSLContext object without an ssl module") # This means 'ssl_version' was specified as an exact value. if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT): # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version' # to avoid conflicts. if ssl_minimum_version is not None or ssl_maximum_version is not None: raise ValueError( "Can't specify both 'ssl_version' and either " "'ssl_minimum_version' or 'ssl_maximum_version'" ) # 'ssl_version' is deprecated and will be removed in the future. else: # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead. ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get( ssl_version, TLSVersion.MINIMUM_SUPPORTED ) ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get( ssl_version, TLSVersion.MAXIMUM_SUPPORTED ) # This warning message is pushing users to use 'ssl_minimum_version' # instead of both min/max. Best practice is to only set the minimum version and # keep the maximum version to be it's default value: 'TLSVersion.MAXIMUM_SUPPORTED' warnings.warn( "'ssl_version' option is deprecated and will be " "removed in urllib3 v2.1.0. 
Instead use 'ssl_minimum_version'", category=DeprecationWarning, stacklevel=2, ) # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT context = SSLContext(PROTOCOL_TLS_CLIENT) if ssl_minimum_version is not None: context.minimum_version = ssl_minimum_version else: # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here context.minimum_version = TLSVersion.TLSv1_2 if ssl_maximum_version is not None: context.maximum_version = ssl_maximum_version # Unless we're given ciphers, defer to system ciphers in the case of # OpenSSL 1.1.1+ or use our own secure default ciphers. if ciphers: context.set_ciphers(ciphers) # Setting the default here, as we may have no ssl module on import cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs if options is None: options = 0 # SSLv2 is easily broken and is considered harmful and dangerous options |= OP_NO_SSLv2 # SSLv3 has several problems and is now dangerous options |= OP_NO_SSLv3 # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ # (issue #309) options |= OP_NO_COMPRESSION # TLSv1.2 only. Unless set explicitly, do not request tickets. # This may save some bandwidth on the wire, and although the ticket is encrypted, # there is a risk associated with it being on the wire # if the server is not rotating its ticketing keys properly. options |= OP_NO_TICKET context.options |= options # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is # necessary for conditional client cert authentication with TLS 1.3. # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older # versions of Python. We only enable on Python 3.7.4+ or if certificate # verification is enabled to work around Python issue #37428 # See: https://bugs.python.org/issue37428 if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( context, "post_handshake_auth", None ) is not None: context.post_handshake_auth = True # The order of the two lines below setting verify_mode and check_hostname # matters due to the safeguards SSLContext has to prevent an SSLContext with # check_hostname=True, verify_mode=NONE/OPTIONAL. # We always set 'check_hostname=False' for pyOpenSSL so we rely on our own # 'ssl.match_hostname()' implementation. if cert_reqs == ssl.CERT_REQUIRED and not IS_PYOPENSSL: context.verify_mode = cert_reqs context.check_hostname = True else: context.check_hostname = False context.verify_mode = cert_reqs try: context.hostname_checks_common_name = False except AttributeError: # Defensive: for CPython < 3.8.9 and 3.9.3; for PyPy < 7.3.8 pass # Enable logging of TLS session keys via the de facto standard environment variable # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. if hasattr(context, "keylog_filename"): sslkeylogfile = os.environ.get("SSLKEYLOGFILE") if sslkeylogfile: context.keylog_filename = sslkeylogfile return context @typing.overload def ssl_wrap_socket( sock: socket.socket, keyfile: str | None = ..., certfile: str | None = ..., cert_reqs: int | None = ..., ca_certs: str | None = ..., server_hostname: str | None = ..., ssl_version: int | None = ..., ciphers: str | None = ..., ssl_context: ssl.SSLContext | None = ..., ca_cert_dir: str | None = ..., key_password: str | None = ..., ca_cert_data: None | str | bytes = ..., tls_in_tls: Literal[False] = ..., ) -> ssl.SSLSocket: ...
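# --------------------------------------------------------------------------
# Illustrative sketch (not part of urllib3): one common way to consume
# create_urllib3_context() is to build a context, tighten or extend it, and
# hand it to a PoolManager via the public 'ssl_context' keyword argument.
# The helper name and target URL below are placeholders, and the sketch
# assumes the 'ssl' module imported above is available; it is never called
# by the library itself.
def _example_custom_tls_context() -> None:  # hypothetical helper
    import urllib3  # deferred import keeps the sketch self-contained

    ctx = create_urllib3_context(ssl_minimum_version=ssl.TLSVersion.TLSv1_2)
    ctx.load_default_certs()  # trust the OS certificate store
    with urllib3.PoolManager(ssl_context=ctx) as http:
        resp = http.request("GET", "https://example.com/")
        print(resp.status)
# --------------------------------------------------------------------------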
@typing.overload def ssl_wrap_socket( sock: socket.socket, keyfile: str | None = ..., certfile: str | None = ..., cert_reqs: int | None = ..., ca_certs: str | None = ..., server_hostname: str | None = ..., ssl_version: int | None = ..., ciphers: str | None = ..., ssl_context: ssl.SSLContext | None = ..., ca_cert_dir: str | None = ..., key_password: str | None = ..., ca_cert_data: None | str | bytes = ..., tls_in_tls: bool = ..., ) -> ssl.SSLSocket | SSLTransportType: ... def ssl_wrap_socket( sock: socket.socket, keyfile: str | None = None, certfile: str | None = None, cert_reqs: int | None = None, ca_certs: str | None = None, server_hostname: str | None = None, ssl_version: int | None = None, ciphers: str | None = None, ssl_context: ssl.SSLContext | None = None, ca_cert_dir: str | None = None, key_password: str | None = None, ca_cert_data: None | str | bytes = None, tls_in_tls: bool = False, ) -> ssl.SSLSocket | SSLTransportType: """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). :param key_password: Optional password if the keyfile is encrypted. :param ca_cert_data: Optional string containing CA certificates in PEM format suitable for passing as the cadata parameter to SSLContext.load_verify_locations() :param tls_in_tls: Use SSLTransport to wrap the existing socket. """ context = ssl_context if context is None: # Note: This branch of code and all the variables in it are only used in tests. # We should consider deprecating and removing this code. context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) if ca_certs or ca_cert_dir or ca_cert_data: try: context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) except OSError as e: raise SSLError(e) from e elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows. context.load_default_certs() # Attempt to detect if we get the goofy behavior of the # keyfile being encrypted and OpenSSL asking for the # passphrase via the terminal and instead error out. if keyfile and key_password is None and _is_key_file_encrypted(keyfile): raise SSLError("Client private key is encrypted, password is required") if certfile: if key_password is None: context.load_cert_chain(certfile, keyfile) else: context.load_cert_chain(certfile, keyfile, key_password) try: context.set_alpn_protocols(ALPN_PROTOCOLS) except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols pass ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname) return ssl_sock def is_ipaddress(hostname: str | bytes) -> bool: """Detects whether the hostname given is an IPv4 or IPv6 address. Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ if isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. 
hostname = hostname.decode("ascii") return bool(_IPV4_RE.match(hostname) or _BRACELESS_IPV6_ADDRZ_RE.match(hostname)) def _is_key_file_encrypted(key_file: str) -> bool: """Detects if a key file is encrypted or not.""" with open(key_file) as f: for line in f: # Look for Proc-Type: 4,ENCRYPTED if "ENCRYPTED" in line: return True return False def _ssl_wrap_socket_impl( sock: socket.socket, ssl_context: ssl.SSLContext, tls_in_tls: bool, server_hostname: str | None = None, ) -> ssl.SSLSocket | SSLTransportType: if tls_in_tls: if not SSLTransport: # Import error, ssl is not available. raise ProxySchemeUnsupported( "TLS in TLS requires support for the 'ssl' module" ) SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) return SSLTransport(sock, ssl_context, server_hostname) return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
# ===== util/ssl_match_hostname.py =====
"""The match_hostname() function from Python 3.5, essential when using SSL.""" # Note: This file is under the PSF license as the code comes from the python # stdlib. http://docs.python.org/3/license.html # It is modified to remove commonName support. from __future__ import annotations import ipaddress import re import typing from ipaddress import IPv4Address, IPv6Address if typing.TYPE_CHECKING: from .ssl_ import _TYPE_PEER_CERT_RET_DICT __version__ = "3.5.0.1" class CertificateError(ValueError): pass def _dnsname_match( dn: typing.Any, hostname: str, max_wildcards: int = 1 ) -> typing.Match[str] | None | bool: """Matching according to RFC 6125, section 6.4.3 http://tools.ietf.org/html/rfc6125#section-6.4.3 """ pats = [] if not dn: return False # Ported from python3-syntax: # leftmost, *remainder = dn.split(r'.') parts = dn.split(r".") leftmost = parts[0] remainder = parts[1:] wildcards = leftmost.count("*") if wildcards > max_wildcards: # Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( "too many wildcards in certificate DNS name: " + repr(dn) ) # speed up common case w/o wildcards if not wildcards: return bool(dn.lower() == hostname.lower()) # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. if leftmost == "*": # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. pats.append("[^.]+") elif leftmost.startswith("xn--") or hostname.startswith("xn--"): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or # U-label of an internationalized domain name. pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) return pat.match(hostname) def _ipaddress_match(ipname: str, host_ip: IPv4Address | IPv6Address) -> bool: """Exact matching of IP addresses. RFC 9110 section 4.3.5: "A reference identity of IP-ID contains the decoded bytes of the IP address. An IP version 4 address is 4 octets, and an IP version 6 address is 16 octets. [...]
A reference identity of type IP-ID matches if the address is identical to an iPAddress value of the subjectAltName extension of the certificate." """ # OpenSSL may add a trailing newline to a subjectAltName's IP address # Divergence from upstream: ipaddress can't handle byte str ip = ipaddress.ip_address(ipname.rstrip()) return bool(ip.packed == host_ip.packed) def match_hostname( cert: _TYPE_PEER_CERT_RET_DICT | None, hostname: str, hostname_checks_common_name: bool = False, ) -> None: """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 rules are followed. Unlike the stdlib original, IP addresses are accepted for *hostname* and matched against iPAddress subjectAltName entries. CertificateError is raised on failure. On success, the function returns nothing. """ if not cert: raise ValueError( "empty or no certificate, match_hostname needs an " "SSL socket or SSL context with either " "CERT_OPTIONAL or CERT_REQUIRED" ) try: # Divergence from upstream: ipaddress can't handle byte str # # The ipaddress module shipped with Python < 3.9 does not support # scoped IPv6 addresses so we unconditionally strip the Zone IDs for # now. Once we drop support for Python 3.8 we can remove this branch. if "%" in hostname: host_ip = ipaddress.ip_address(hostname[: hostname.rfind("%")]) else: host_ip = ipaddress.ip_address(hostname) except ValueError: # Not an IP address (common case) host_ip = None dnsnames = [] san: tuple[tuple[str, str], ...] = cert.get("subjectAltName", ()) key: str value: str for key, value in san: if key == "DNS": if host_ip is None and _dnsname_match(value, hostname): return dnsnames.append(value) elif key == "IP Address": if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) # We only check 'commonName' if it's enabled and we're not verifying # an IP address. IP addresses aren't valid within 'commonName'. if hostname_checks_common_name and host_ip is None and not dnsnames: for sub in cert.get("subject", ()): for key, value in sub: if key == "commonName": if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: raise CertificateError( "hostname %r " "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) ) elif len(dnsnames) == 1: raise CertificateError(f"hostname {hostname!r} doesn't match {dnsnames[0]!r}") else: raise CertificateError("no appropriate subjectAltName fields were found")
# ===== util/ssltransport.py =====
from __future__ import annotations import io import socket import ssl import typing from ..exceptions import ProxySchemeUnsupported if typing.TYPE_CHECKING: from typing_extensions import Literal from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT _SelfT = typing.TypeVar("_SelfT", bound="SSLTransport") _WriteBuffer = typing.Union[bytearray, memoryview] _ReturnValue = typing.TypeVar("_ReturnValue") SSL_BLOCKSIZE = 16384 class SSLTransport: """ The SSLTransport wraps an existing socket and establishes an SSL connection. Unlike Python's built-in SSLSocket, it allows you to chain multiple TLS connections together. It's particularly useful if you need to implement TLS within TLS. The class supports most of the socket API operations. """ @staticmethod def _validate_ssl_context_for_tls_in_tls(ssl_context: ssl.SSLContext) -> None: """ Raises a ProxySchemeUnsupported if the provided ssl_context can't be used for TLS in TLS. The only requirement is that the ssl_context provides the 'wrap_bio' method.
""" if not hasattr(ssl_context, "wrap_bio"): raise ProxySchemeUnsupported( "TLS in TLS requires SSLContext.wrap_bio() which isn't " "available on non-native SSLContext" ) def __init__( self, socket: socket.socket, ssl_context: ssl.SSLContext, server_hostname: str | None = None, suppress_ragged_eofs: bool = True, ) -> None: """ Create an SSLTransport around socket using the provided ssl_context. """ self.incoming = ssl.MemoryBIO() self.outgoing = ssl.MemoryBIO() self.suppress_ragged_eofs = suppress_ragged_eofs self.socket = socket self.sslobj = ssl_context.wrap_bio( self.incoming, self.outgoing, server_hostname=server_hostname ) # Perform initial handshake. self._ssl_io_loop(self.sslobj.do_handshake) def __enter__(self: _SelfT) -> _SelfT: return self def __exit__(self, *_: typing.Any) -> None: self.close() def fileno(self) -> int: return self.socket.fileno() def read(self, len: int = 1024, buffer: typing.Any | None = None) -> int | bytes: return self._wrap_ssl_read(len, buffer) def recv(self, buflen: int = 1024, flags: int = 0) -> int | bytes: if flags != 0: raise ValueError("non-zero flags not allowed in calls to recv") return self._wrap_ssl_read(buflen) def recv_into( self, buffer: _WriteBuffer, nbytes: int | None = None, flags: int = 0, ) -> None | int | bytes: if flags != 0: raise ValueError("non-zero flags not allowed in calls to recv_into") if nbytes is None: nbytes = len(buffer) return self.read(nbytes, buffer) def sendall(self, data: bytes, flags: int = 0) -> None: if flags != 0: raise ValueError("non-zero flags not allowed in calls to sendall") count = 0 with memoryview(data) as view, view.cast("B") as byte_view: amount = len(byte_view) while count < amount: v = self.send(byte_view[count:]) count += v def send(self, data: bytes, flags: int = 0) -> int: if flags != 0: raise ValueError("non-zero flags not allowed in calls to send") return self._ssl_io_loop(self.sslobj.write, data) def makefile( self, mode: str, buffering: int | None = None, *, encoding: str | None = None, errors: str | None = None, newline: str | None = None, ) -> typing.BinaryIO | typing.TextIO | socket.SocketIO: """ Python's httpclient uses makefile and buffered io when reading HTTP messages and we need to support it. This is unfortunately a copy and paste of socket.py makefile with small changes to point to the socket directly. """ if not set(mode) <= {"r", "w", "b"}: raise ValueError(f"invalid mode {mode!r} (only r, w, b allowed)") writing = "w" in mode reading = "r" in mode or not writing assert reading or writing binary = "b" in mode rawmode = "" if reading: rawmode += "r" if writing: rawmode += "w" raw = socket.SocketIO(self, rawmode) # type: ignore[arg-type] self.socket._io_refs += 1 # type: ignore[attr-defined] if buffering is None: buffering = -1 if buffering < 0: buffering = io.DEFAULT_BUFFER_SIZE if buffering == 0: if not binary: raise ValueError("unbuffered streams must be binary") return raw buffer: typing.BinaryIO if reading and writing: buffer = io.BufferedRWPair(raw, raw, buffering) # type: ignore[assignment] elif reading: buffer = io.BufferedReader(raw, buffering) else: assert writing buffer = io.BufferedWriter(raw, buffering) if binary: return buffer text = io.TextIOWrapper(buffer, encoding, errors, newline) text.mode = mode # type: ignore[misc] return text def unwrap(self) -> None: self._ssl_io_loop(self.sslobj.unwrap) def close(self) -> None: self.socket.close() @typing.overload def getpeercert( self, binary_form: Literal[False] = ... ) -> _TYPE_PEER_CERT_RET_DICT | None: ... 
@typing.overload def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET: return self.sslobj.getpeercert(binary_form) # type: ignore[return-value] def version(self) -> str | None: return self.sslobj.version() def cipher(self) -> tuple[str, str, int] | None: return self.sslobj.cipher() def selected_alpn_protocol(self) -> str | None: return self.sslobj.selected_alpn_protocol() def selected_npn_protocol(self) -> str | None: return self.sslobj.selected_npn_protocol() def shared_ciphers(self) -> list[tuple[str, str, int]] | None: return self.sslobj.shared_ciphers() def compression(self) -> str | None: return self.sslobj.compression() def settimeout(self, value: float | None) -> None: self.socket.settimeout(value) def gettimeout(self) -> float | None: return self.socket.gettimeout() def _decref_socketios(self) -> None: self.socket._decref_socketios() # type: ignore[attr-defined] def _wrap_ssl_read(self, len: int, buffer: bytearray | None = None) -> int | bytes: try: return self._ssl_io_loop(self.sslobj.read, len, buffer) except ssl.SSLError as e: if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: return 0 # eof, return 0. else: raise # func is sslobj.do_handshake or sslobj.unwrap @typing.overload def _ssl_io_loop(self, func: typing.Callable[[], None]) -> None: ... # func is sslobj.write, arg1 is data @typing.overload def _ssl_io_loop(self, func: typing.Callable[[bytes], int], arg1: bytes) -> int: ... # func is sslobj.read, arg1 is len, arg2 is buffer @typing.overload def _ssl_io_loop( self, func: typing.Callable[[int, bytearray | None], bytes], arg1: int, arg2: bytearray | None, ) -> bytes: ... def _ssl_io_loop( self, func: typing.Callable[..., _ReturnValue], arg1: None | bytes | int = None, arg2: bytearray | None = None, ) -> _ReturnValue: """Performs an I/O loop between incoming/outgoing and the socket.""" should_loop = True ret = None while should_loop: errno = None try: if arg1 is None and arg2 is None: ret = func() elif arg2 is None: ret = func(arg1) else: ret = func(arg1, arg2) except ssl.SSLError as e: if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): # WANT_READ and WANT_WRITE are expected; others are not. raise e errno = e.errno buf = self.outgoing.read() self.socket.sendall(buf) if errno is None: should_loop = False elif errno == ssl.SSL_ERROR_WANT_READ: buf = self.socket.recv(SSL_BLOCKSIZE) if buf: self.incoming.write(buf) else: self.incoming.write_eof() return typing.cast(_ReturnValue, ret)
# ===== util/timeout.py =====
from __future__ import annotations import time import typing from enum import Enum from socket import getdefaulttimeout from ..exceptions import TimeoutStateError if typing.TYPE_CHECKING: from typing_extensions import Final class _TYPE_DEFAULT(Enum): # This value should never be passed to socket.settimeout() so for safety we use -1. # socket.settimeout() raises a ValueError for negative values. token = -1 _DEFAULT_TIMEOUT: Final[_TYPE_DEFAULT] = _TYPE_DEFAULT.token _TYPE_TIMEOUT = typing.Optional[typing.Union[float, _TYPE_DEFAULT]] class Timeout: """Timeout configuration. Timeouts can be defined as a default for a pool: .. code-block:: python import urllib3 timeout = urllib3.util.Timeout(connect=2.0, read=7.0) http = urllib3.PoolManager(timeout=timeout) resp = http.request("GET", "https://example.com/") print(resp.status) Or per-request (which overrides the default for the pool): ..
code-block:: python response = http.request("GET", "https://example.com/", timeout=Timeout(10)) Timeouts can be disabled by setting all the parameters to ``None``: .. code-block:: python no_timeout = Timeout(connect=None, read=None) response = http.request("GET", "https://example.com/", timeout=no_timeout) :param total: This combines the connect and read timeouts into one; the read timeout will be set to the time leftover from the connect attempt. In the event that both a connect timeout and a total are specified, or a read timeout and a total are specified, the shorter timeout will be applied. Defaults to None. :type total: int, float, or None :param connect: The maximum amount of time (in seconds) to wait for a connection attempt to a server to succeed. Omitting the parameter will default the connect timeout to the system default, probably the global default timeout in socket.py. None will set an infinite timeout for connection attempts. :type connect: int, float, or None :param read: The maximum amount of time (in seconds) to wait between consecutive read operations for a response from the server. Omitting the parameter will default the read timeout to the system default, probably the global default timeout in socket.py. None will set an infinite timeout. :type read: int, float, or None .. note:: Many factors can affect the total amount of time for urllib3 to return an HTTP response. For example, Python's DNS resolver does not obey the timeout specified on the socket. Other factors that can affect total request time include high CPU load, high swap, the program running at a low priority level, or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, not the total amount of time for the request to return a complete response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout of 20 seconds will not trigger, even though the request will take several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow request. """ #: A sentinel object representing the default timeout value DEFAULT_TIMEOUT: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT def __init__( self, total: _TYPE_TIMEOUT = None, connect: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, read: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, ) -> None: self._connect = self._validate_timeout(connect, "connect") self._read = self._validate_timeout(read, "read") self.total = self._validate_timeout(total, "total") self._start_connect: float | None = None def __repr__(self) -> str: return f"{type(self).__name__}(connect={self._connect!r}, read={self._read!r}, total={self.total!r})" # __str__ provided for backwards compatibility __str__ = __repr__ @staticmethod def resolve_default_timeout(timeout: _TYPE_TIMEOUT) -> float | None: return getdefaulttimeout() if timeout is _DEFAULT_TIMEOUT else timeout @classmethod def _validate_timeout(cls, value: _TYPE_TIMEOUT, name: str) -> _TYPE_TIMEOUT: """Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used in error messages. :return: The validated and cast version of the given value.
:raises ValueError: If it is a numeric value less than or equal to zero, or the type is not an integer, float, or None. """ if value is None or value is _DEFAULT_TIMEOUT: return value if isinstance(value, bool): raise ValueError( "Timeout cannot be a boolean value. It must " "be an int, float or None." ) try: float(value) except (TypeError, ValueError): raise ValueError( "Timeout value %s was %s, but it must be an " "int, float or None." % (name, value) ) from None try: if value <= 0: raise ValueError( "Attempted to set %s timeout to %s, but the " "timeout cannot be set to a value less " "than or equal to 0." % (name, value) ) except TypeError: raise ValueError( "Timeout value %s was %s, but it must be an " "int, float or None." % (name, value) ) from None return value @classmethod def from_float(cls, timeout: _TYPE_TIMEOUT) -> Timeout: """Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect() and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, :attr:`urllib3.util.Timeout.DEFAULT_TIMEOUT`, or None :return: Timeout object :rtype: :class:`Timeout` """ return Timeout(read=timeout, connect=timeout) def clone(self) -> Timeout: """Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` """ # We can't use copy.deepcopy because that will also create a new object # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to # detect the user default. return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self) -> float: """Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. """ if self._start_connect is not None: raise TimeoutStateError("Timeout timer has already been started.") self._start_connect = time.monotonic() return self._start_connect def get_connect_duration(self) -> float: """Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time in seconds. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. """ if self._start_connect is None: raise TimeoutStateError( "Can't get connect duration for timer that has not started." ) return time.monotonic() - self._start_connect @property def connect_timeout(self) -> _TYPE_TIMEOUT: """Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """ if self.total is None: return self._connect if self._connect is None or self._connect is _DEFAULT_TIMEOUT: return self.total return min(self._connect, self.total) # type: ignore[type-var] @property def read_timeout(self) -> float | None: """Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout.
If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. """ if ( self.total is not None and self.total is not _DEFAULT_TIMEOUT and self._read is not None and self._read is not _DEFAULT_TIMEOUT ): # In case the connect timeout has not yet been established. if self._start_connect is None: return self._read return max(0, min(self.total - self.get_connect_duration(), self._read)) elif self.total is not None and self.total is not _DEFAULT_TIMEOUT: return max(0, self.total - self.get_connect_duration()) else: return self.resolve_default_timeout(self._read)
# ===== util/url.py =====
from __future__ import annotations import re import typing from ..exceptions import LocationParseError from .util import to_str # We only want to normalize urls with an HTTP(S) scheme. # urllib3 infers URLs without a scheme (None) to be http. _NORMALIZABLE_SCHEMES = ("http", "https", None) # Almost all of these patterns were derived from the # 'rfc3986' module: https://github.com/python-hyper/rfc3986 _PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") _SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") _URI_RE = re.compile( r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" r"(?://([^\\/?#]*))?" r"([^?#]*)" r"(?:\?([^#]*))?" r"(?:#(.*))?$", re.UNICODE | re.DOTALL, ) _IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" _HEX_PAT = "[0-9A-Fa-f]{1,4}" _LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=_HEX_PAT, ipv4=_IPV4_PAT) _subs = {"hex": _HEX_PAT, "ls32": _LS32_PAT} _variations = [ # 6( h16 ":" ) ls32 "(?:%(hex)s:){6}%(ls32)s", # "::" 5( h16 ":" ) ls32 "::(?:%(hex)s:){5}%(ls32)s", # [ h16 ] "::" 4( h16 ":" ) ls32 "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", # [ *4( h16 ":" ) h16 ] "::" ls32 "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", # [ *5( h16 ":" ) h16 ] "::" h16 "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", # [ *6( h16 ":" ) h16 ] "::" "(?:(?:%(hex)s:){0,6}%(hex)s)?::", ] _UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" _IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" _ZONE_ID_PAT = "(?:%25|%)(?:[" + _UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" _IPV6_ADDRZ_PAT = r"\[" + _IPV6_PAT + r"(?:" + _ZONE_ID_PAT + r")?\]" _REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" _TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") _IPV4_RE = re.compile("^" + _IPV4_PAT + "$") _IPV6_RE = re.compile("^" + _IPV6_PAT + "$") _IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT + "$") _BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT[2:-2] + "$") _ZONE_ID_RE = re.compile("(" + _ZONE_ID_PAT + r")\]$") _HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % ( _REG_NAME_PAT, _IPV4_PAT, _IPV6_ADDRZ_PAT, ) _HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL) _UNRESERVED_CHARS = set( "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" ) _SUB_DELIM_CHARS = set("!$&'()*+,;=") _USERINFO_CHARS = _UNRESERVED_CHARS | _SUB_DELIM_CHARS | {":"} _PATH_CHARS = _USERINFO_CHARS | {"@", "/"} _QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"} class
Url( typing.NamedTuple( "Url", [ ("scheme", typing.Optional[str]), ("auth", typing.Optional[str]), ("host", typing.Optional[str]), ("port", typing.Optional[int]), ("path", typing.Optional[str]), ("query", typing.Optional[str]), ("fragment", typing.Optional[str]), ], ) ): """ Data structure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. """ def __new__( # type: ignore[no-untyped-def] cls, scheme: str | None = None, auth: str | None = None, host: str | None = None, port: int | None = None, path: str | None = None, query: str | None = None, fragment: str | None = None, ): if path and not path.startswith("/"): path = "/" + path if scheme is not None: scheme = scheme.lower() return super().__new__(cls, scheme, auth, host, port, path, query, fragment) @property def hostname(self) -> str | None: """For backwards-compatibility with urlparse. We're nice like that.""" return self.host @property def request_uri(self) -> str: """Absolute path including the query string.""" uri = self.path or "/" if self.query is not None: uri += "?" + self.query return uri @property def authority(self) -> str | None: """ Authority component as defined in RFC 3986 3.2. This includes userinfo (auth), host and port. i.e. userinfo@host:port """ userinfo = self.auth netloc = self.netloc if netloc is None or userinfo is None: return netloc else: return f"{userinfo}@{netloc}" @property def netloc(self) -> str | None: """ Network location including host and port. If you need the equivalent of urllib.parse's ``netloc``, use the ``authority`` property instead. """ if self.host is None: return None if self.port: return f"{self.host}:{self.port}" return self.host @property def url(self) -> str: """ Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: .. code-block:: python import urllib3 U = urllib3.util.parse_url("https://google.com/mail/") print(U.url) # "https://google.com/mail/" print( urllib3.util.Url("https", "username:password", "host.com", 80, "/path", "query", "fragment" ).url ) # "https://username:password@host.com:80/path?query#fragment" """ scheme, auth, host, port, path, query, fragment = self url = "" # We use "is not None" because we want things to happen with empty strings (or 0 port) if scheme is not None: url += scheme + "://" if auth is not None: url += auth + "@" if host is not None: url += host if port is not None: url += ":" + str(port) if path is not None: url += path if query is not None: url += "?" + query if fragment is not None: url += "#" + fragment return url def __str__(self) -> str: return self.url @typing.overload def _encode_invalid_chars( component: str, allowed_chars: typing.Container[str] ) -> str: # Abstract ... @typing.overload def _encode_invalid_chars( component: None, allowed_chars: typing.Container[str] ) -> None: # Abstract ... def _encode_invalid_chars( component: str | None, allowed_chars: typing.Container[str] ) -> str | None: """Percent-encodes a URI component without reapplying onto an already percent-encoded component. """ if component is None: return component component = to_str(component) # Normalize existing percent-encoded bytes.
# Try to see if the component we're encoding is already percent-encoded # so we can skip all '%' characters but still encode all others. component, percent_encodings = _PERCENT_RE.subn( lambda match: match.group(0).upper(), component ) uri_bytes = component.encode("utf-8", "surrogatepass") is_percent_encoded = percent_encodings == uri_bytes.count(b"%") encoded_component = bytearray() for i in range(0, len(uri_bytes)): # Will return a single character bytestring byte = uri_bytes[i : i + 1] byte_ord = ord(byte) if (is_percent_encoded and byte == b"%") or ( byte_ord < 128 and byte.decode() in allowed_chars ): encoded_component += byte continue encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) return encoded_component.decode() def _remove_path_dot_segments(path: str) -> str: # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code segments = path.split("/") # Turn the path into a list of segments output = [] # Initialize the variable to use to store output for segment in segments: # '.' is the current directory, so ignore it, it is superfluous if segment == ".": continue # Anything other than '..', should be appended to the output if segment != "..": output.append(segment) # In this case segment == '..', if we can, we should pop the last # element elif output: output.pop() # If the path starts with '/' and the output is empty or the first string # is non-empty if path.startswith("/") and (not output or output[0]): output.insert(0, "") # If the path starts with '/.' or '/..' ensure we add one more empty # string to add a trailing '/' if path.endswith(("/.", "/..")): output.append("") return "/".join(output) @typing.overload def _normalize_host(host: None, scheme: str | None) -> None: ... @typing.overload def _normalize_host(host: str, scheme: str | None) -> str: ... def _normalize_host(host: str | None, scheme: str | None) -> str | None: if host: if scheme in _NORMALIZABLE_SCHEMES: is_ipv6 = _IPV6_ADDRZ_RE.match(host) if is_ipv6: # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID # separator as necessary to return a valid RFC 4007 scoped IP. match = _ZONE_ID_RE.search(host) if match: start, end = match.span(1) zone_id = host[start:end] if zone_id.startswith("%25") and zone_id != "%25": zone_id = zone_id[3:] else: zone_id = zone_id[1:] zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS) return f"{host[:start].lower()}%{zone_id}{host[end:]}" else: return host.lower() elif not _IPV4_RE.match(host): return to_str( b".".join([_idna_encode(label) for label in host.split(".")]), "ascii", ) return host def _idna_encode(name: str) -> bytes: if not name.isascii(): try: import idna except ImportError: raise LocationParseError( "Unable to parse URL without the 'idna' module" ) from None try: return idna.encode(name.lower(), strict=True, std3_rules=True) except idna.IDNAError: raise LocationParseError( f"Name '{name}' is not a valid IDNA label" ) from None return name.lower().encode("ascii") def _encode_target(target: str) -> str: """Percent-encodes a request target so that there are no invalid characters Pre-condition for this function is that 'target' must start with '/'. If that is the case then _TARGET_RE will always produce a match. 
""" match = _TARGET_RE.match(target) if not match: # Defensive: raise LocationParseError(f"{target!r} is not a valid request URI") path, query = match.groups() encoded_target = _encode_invalid_chars(path, _PATH_CHARS) if query is not None: query = _encode_invalid_chars(query, _QUERY_CHARS) encoded_target += "?" + query return encoded_target def parse_url(url: str) -> Url: """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. This parser is RFC 3986 and RFC 6874 compliant. The parser logic and helper functions are based heavily on work done in the ``rfc3986`` module. :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urllib.parse`. Example: .. code-block:: python import urllib3 print( urllib3.util.parse_url('http://google.com/mail/')) # Url(scheme='http', host='google.com', port=None, path='/mail/', ...) print( urllib3.util.parse_url('google.com:80')) # Url(scheme=None, host='google.com', port=80, path=None, ...) print( urllib3.util.parse_url('/foo?bar')) # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ if not url: # Empty return Url() source_url = url if not _SCHEME_RE.search(url): url = "//" + url scheme: str | None authority: str | None auth: str | None host: str | None port: str | None port_int: int | None path: str | None query: str | None fragment: str | None try: scheme, authority, path, query, fragment = _URI_RE.match(url).groups() # type: ignore[union-attr] normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES if scheme: scheme = scheme.lower() if authority: auth, _, host_port = authority.rpartition("@") auth = auth or None host, port = _HOST_PORT_RE.match(host_port).groups() # type: ignore[union-attr] if auth and normalize_uri: auth = _encode_invalid_chars(auth, _USERINFO_CHARS) if port == "": port = None else: auth, host, port = None, None, None if port is not None: port_int = int(port) if not (0 <= port_int <= 65535): raise LocationParseError(url) else: port_int = None host = _normalize_host(host, scheme) if normalize_uri and path: path = _remove_path_dot_segments(path) path = _encode_invalid_chars(path, _PATH_CHARS) if normalize_uri and query: query = _encode_invalid_chars(query, _QUERY_CHARS) if normalize_uri and fragment: fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS) except (ValueError, AttributeError) as e: raise LocationParseError(source_url) from e # For the sake of backwards compatibility we put empty # string values for path if there are any defined values # beyond the path in the URL. # TODO: Remove this when we break backwards compatibility. 
if not path: if query is not None or fragment is not None: path = "" else: path = None return Url( scheme=scheme, auth=auth, host=host, port=port_int, path=path, query=query, fragment=fragment, )
# ===== util/util.py =====
from __future__ import annotations import typing from types import TracebackType def to_bytes( x: str | bytes, encoding: str | None = None, errors: str | None = None ) -> bytes: if isinstance(x, bytes): return x elif not isinstance(x, str): raise TypeError(f"not expecting type {type(x).__name__}") if encoding or errors: return x.encode(encoding or "utf-8", errors=errors or "strict") return x.encode() def to_str( x: str | bytes, encoding: str | None = None, errors: str | None = None ) -> str: if isinstance(x, str): return x elif not isinstance(x, bytes): raise TypeError(f"not expecting type {type(x).__name__}") if encoding or errors: return x.decode(encoding or "utf-8", errors=errors or "strict") return x.decode() def reraise( tp: type[BaseException] | None, value: BaseException, tb: TracebackType | None = None, ) -> typing.NoReturn: try: if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value finally: value = None # type: ignore[assignment] tb = None
# ===== util/wait.py =====
from __future__ import annotations import select import socket from functools import partial __all__ = ["wait_for_read", "wait_for_write"] # How should we wait on sockets? # # There are two types of APIs you can use for waiting on sockets: the fancy # modern stateful APIs like epoll/kqueue, and the older stateless APIs like # select/poll. The stateful APIs are more efficient when you have a lot of # sockets to keep track of, because you can set them up once and then use them # lots of times. But we only ever want to wait on a single socket at a time # and don't want to keep track of state, so the stateless APIs are actually # more efficient. So we want to use select() or poll(). # # Now, how do we choose between select() and poll()? On traditional Unixes, # select() has a strange calling convention that makes it slow, or fail # altogether, for high-numbered file descriptors. The point of poll() is to fix # that, so on Unixes, we prefer poll(). # # On Windows, there is no poll() (or at least Python doesn't provide a wrapper # for it), but that's OK, because on Windows, select() doesn't have this # strange calling convention; plain select() works fine. # # So: on Windows we use select(), and everywhere else we use poll(). We also # fall back to select() in case poll() is somehow broken or missing. def select_wait_for_socket( sock: socket.socket, read: bool = False, write: bool = False, timeout: float | None = None, ) -> bool: if not read and not write: raise RuntimeError("must specify at least one of read=True, write=True") rcheck = [] wcheck = [] if read: rcheck.append(sock) if write: wcheck.append(sock) # When doing a non-blocking connect, most systems signal success by # marking the socket writable. Windows, though, signals success by marking # it as "exceptional". We paper over the difference by checking the write # sockets for both conditions. (The stdlib selectors module does the same # thing.)
fn = partial(select.select, rcheck, wcheck, wcheck) rready, wready, xready = fn(timeout) return bool(rready or wready or xready) def poll_wait_for_socket( sock: socket.socket, read: bool = False, write: bool = False, timeout: float | None = None, ) -> bool: if not read and not write: raise RuntimeError("must specify at least one of read=True, write=True") mask = 0 if read: mask |= select.POLLIN if write: mask |= select.POLLOUT poll_obj = select.poll() poll_obj.register(sock, mask) # For some reason, poll() takes timeout in milliseconds def do_poll(t: float | None) -> list[tuple[int, int]]: if t is not None: t *= 1000 return poll_obj.poll(t) return bool(do_poll(timeout)) def _have_working_poll() -> bool: # Apparently some systems have a select.poll that fails as soon as you try # to use it, either due to strange configuration or broken monkeypatching # from libraries like eventlet/greenlet. try: poll_obj = select.poll() poll_obj.poll(0) except (AttributeError, OSError): return False else: return True def wait_for_socket( sock: socket.socket, read: bool = False, write: bool = False, timeout: float | None = None, ) -> bool: # We delay choosing which implementation to use until the first time we're # called. We could do it at import time, but then we might make the wrong # decision if someone goes wild with monkeypatching select.poll after # we're imported. global wait_for_socket if _have_working_poll(): wait_for_socket = poll_wait_for_socket elif hasattr(select, "select"): wait_for_socket = select_wait_for_socket return wait_for_socket(sock, read, write, timeout) def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool: """Waits for reading to be available on a given socket. Returns True if the socket is readable, or False if the timeout expired. """ return wait_for_socket(sock, read=True, timeout=timeout) def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool: """Waits for writing to be available on a given socket. Returns True if the socket is writable, or False if the timeout expired.
""" return wait_for_socket(sock, write=True, timeout=timeout) PKQZY= __init__.pynu[PKQZRb$__pycache__/__init__.cpython-311.pycnu[PKQZɔV,+__pycache__/_base_connection.cpython-311.pycnu[PKQZ1.c.c(9H__pycache__/_collections.cpython-311.pycnu[PKQZ8cdd!d!,__pycache__/_request_methods.cpython-311.pycnu[PKQZ;  $__pycache__/_version.cpython-311.pycnu[PKQZ/^sZZ&__pycache__/connection.cpython-311.pycnu[PKQZ[|FF*P__pycache__/connectionpool.cpython-311.pycnu[PKQZWZ!K!K&.__pycache__/exceptions.cpython-311.pycnu[PKQZJ22"<__pycache__/fields.cpython-311.pycnu[PKQZҌD &&$o__pycache__/filepost.cpython-311.pycnu[PKQZ֕bb'R__pycache__/poolmanager.cpython-311.pycnu[PKQZ:>$__pycache__/response.cpython-311.pycnu[PKQZ_base_connection.pynu[PKQZTAAJ_collections.pynu[PKQZڃ5LL:_request_methods.pynu[PKQZ^.bb _version.pynu[PKQZ&& fconnection.pynu[PKQZ;lѧѧɠconnectionpool.pynu[PKQZHcontrib/__init__.pynu[PKQZ,,Icontrib/__pycache__/__init__.cpython-311.pycnu[PKQZft`o`o-6Jcontrib/__pycache__/pyopenssl.cpython-311.pycnu[PKQZ33contrib/__pycache__/securetransport.cpython-311.pycnu[PKQZ~_"")oKcontrib/__pycache__/socks.cpython-311.pycnu[PKQZ$ncontrib/_securetransport/__init__.pynu[PKQZ'?B=ncontrib/_securetransport/__pycache__/__init__.cpython-311.pycnu[PKQZnQ<5<5=:pcontrib/_securetransport/__pycache__/bindings.cpython-311.pycnu[PKQZ3GG>contrib/_securetransport/__pycache__/low_level.cpython-311.pycnu[PKQZ-t8t8$5contrib/_securetransport/bindings.pynu[PKQZ6\?\?%&contrib/_securetransport/low_level.pynu[PKQZn\KKfcontrib/pyopenssl.pynu[PKQZ]MII߲contrib/securetransport.pynu[PKQZ潌##r8contrib/socks.pynu[PKQZ#$$ Vexceptions.pynu[PKQZoLs++ {fields.pynu[PKQZ洖[ [ filepost.pynu[PKQZ3xXxXpoolmanager.pynu[PKQZ]]R py.typednu[PKQZJ  response.pynu[PKQZ]?D util/__init__.pynu[PKQZ6) util/__pycache__/__init__.cpython-311.pycnu[PKQZkc+c util/__pycache__/connection.cpython-311.pycnu[PKQZUI& util/__pycache__/proxy.cpython-311.pycnu[PKQZ;N""( util/__pycache__/request.cpython-311.pycnu[PKQZjM  ) util/__pycache__/response.cpython-311.pycnu[PKQZ;x;R;R& util/__pycache__/retry.cpython-311.pycnu[PKQZcDD%M util/__pycache__/ssl_.cpython-311.pycnu[PKQZlD|hh3y util/__pycache__/ssl_match_hostname.cpython-311.pycnu[PKQZ4949-D util/__pycache__/ssltransport.cpython-311.pycnu[PKQZRT//( util/__pycache__/timeout.cpython-311.pycnu[PKQZBnEnE$ util/__pycache__/url.cpython-311.pycnu[PKQZha9Fyy%Y util/__pycache__/util.cpython-311.pycnu[PKQZ%b util/__pycache__/wait.cpython-311.pycnu[PKQZ&Qnnq util/connection.pynu[PKQZL|| ? util/proxy.pynu[PKQZ¿ util/request.pynu[PKQZr s'. . ʧ util/response.pynu[PKQZ=q GG 8 util/retry.pynu[PKQZ,K,K ; util/ssl_.pynu[PKQZe$H util/ssl_match_hostname.pynu[PKQZn=0_U#U#_ util/ssltransport.pynu[PKQZix!)!): util/timeout.pynu[PKQZX8m;m;  util/url.pynu[PKQZ_zz B util/util.pynu[PKQZVEGG  util/wait.pynu[PKAA{