Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/requests/adapters.py: 17% (216 statements). Report generated by coverage.py v6.4.4, created at 2023-07-17 14:22 -0600.

1""" 

2requests.adapters 

3~~~~~~~~~~~~~~~~~ 

4 

5This module contains the transport adapters that Requests uses to define 

6and maintain connections. 

7""" 

8 

9import os.path 

10import socket # noqa: F401 

11 

12from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError 

13from urllib3.exceptions import HTTPError as _HTTPError 

14from urllib3.exceptions import InvalidHeader as _InvalidHeader 

15from urllib3.exceptions import ( 

16 LocationValueError, 

17 MaxRetryError, 

18 NewConnectionError, 

19 ProtocolError, 

20) 

21from urllib3.exceptions import ProxyError as _ProxyError 

22from urllib3.exceptions import ReadTimeoutError, ResponseError 

23from urllib3.exceptions import SSLError as _SSLError 

24from urllib3.poolmanager import PoolManager, proxy_from_url 

25from urllib3.response import HTTPResponse 

26from urllib3.util import Timeout as TimeoutSauce 

27from urllib3.util import parse_url 

28from urllib3.util.retry import Retry 

29 

30from .auth import _basic_auth_str 

31from .compat import basestring, urlparse 

32from .cookies import extract_cookies_to_jar 

33from .exceptions import ( 

34 ConnectionError, 

35 ConnectTimeout, 

36 InvalidHeader, 

37 InvalidProxyURL, 

38 InvalidSchema, 

39 InvalidURL, 

40 ProxyError, 

41 ReadTimeout, 

42 RetryError, 

43 SSLError, 

44) 

45from .models import Response 

46from .structures import CaseInsensitiveDict 

47from .utils import ( 

48 DEFAULT_CA_BUNDLE_PATH, 

49 extract_zipped_paths, 

50 get_auth_from_url, 

51 get_encoding_from_headers, 

52 prepend_scheme_if_needed, 

53 select_proxy, 

54 urldefragauth, 

55) 

56 

57try: 

58 from urllib3.contrib.socks import SOCKSProxyManager 

59except ImportError: 

60 

61 def SOCKSProxyManager(*args, **kwargs): 

62 raise InvalidSchema("Missing dependencies for SOCKS support.") 

63 

64 

65DEFAULT_POOLBLOCK = False 

66DEFAULT_POOLSIZE = 10 

67DEFAULT_RETRIES = 0 

68DEFAULT_POOL_TIMEOUT = None 

69 

70 

71class BaseAdapter: 

72 """The Base Transport Adapter""" 

73 

74 def __init__(self): 

75 super().__init__() 

76 

77 def send( 

78 self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None 

79 ): 

80 """Sends PreparedRequest object. Returns Response object. 

81 

82 :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. 

83 :param stream: (optional) Whether to stream the request content. 

84 :param timeout: (optional) How long to wait for the server to send 

85 data before giving up, as a float, or a :ref:`(connect timeout, 

86 read timeout) <timeouts>` tuple. 

87 :type timeout: float or tuple 

88 :param verify: (optional) Either a boolean, in which case it controls whether we verify 

89 the server's TLS certificate, or a string, in which case it must be a path 

90 to a CA bundle to use 

91 :param cert: (optional) Any user-provided SSL certificate to be trusted. 

92 :param proxies: (optional) The proxies dictionary to apply to the request. 

93 """ 

94 raise NotImplementedError 

95 

96 def close(self): 

97 """Cleans up adapter specific items.""" 

98 raise NotImplementedError 

99 

100 
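
# A hedged, minimal sketch (not part of the upstream module): a custom
# transport adapter only needs to implement the BaseAdapter interface above,
# i.e. ``send()`` and ``close()``. The class name and behaviour below are
# illustrative assumptions, not requests' own code.
#
#     class NullAdapter(BaseAdapter):
#         """Illustrative adapter that refuses to send anything."""
#
#         def send(self, request, stream=False, timeout=None, verify=True,
#                  cert=None, proxies=None):
#             raise ConnectionError(f"refusing to contact {request.url}")
#
#         def close(self):
#             pass  # no pooled resources to release
#
#     # session.mount("https://blocked.example/", NullAdapter())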

class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS URLs by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """

    __attrs__ = [
        "max_retries",
        "config",
        "_pool_connections",
        "_pool_maxsize",
        "_pool_block",
    ]

    def __init__(
        self,
        pool_connections=DEFAULT_POOLSIZE,
        pool_maxsize=DEFAULT_POOLSIZE,
        max_retries=DEFAULT_RETRIES,
        pool_block=DEFAULT_POOLBLOCK,
    ):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super().__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(
            self._pool_connections, self._pool_maxsize, block=self._pool_block
        )

    def init_poolmanager(
        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
    ):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            strict=True,
            **pool_kwargs,
        )
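
    # A hedged sketch of the subclassing hook described above: forward an
    # extra keyword through ``pool_kwargs`` when building the PoolManager.
    # ``SSLContextAdapter`` and its ``ssl_context`` argument are illustrative
    # assumptions, not part of this module.
    #
    #     import ssl
    #
    #     class SSLContextAdapter(HTTPAdapter):
    #         def __init__(self, ssl_context=None, **kwargs):
    #             self._ssl_context = ssl_context
    #             super().__init__(**kwargs)
    #
    #         def init_poolmanager(self, connections, maxsize,
    #                              block=DEFAULT_POOLBLOCK, **pool_kwargs):
    #             if self._ssl_context is not None:
    #                 pool_kwargs["ssl_context"] = self._ssl_context
    #             super().init_poolmanager(connections, maxsize,
    #                                      block=block, **pool_kwargs)
    #
    #     # s.mount("https://", SSLContextAdapter(ssl_context=ssl.create_default_context()))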

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith("socks"):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )

        return manager

    def cert_verify(self, conn, url, verify, cert):
        """Verify an SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith("https") and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise OSError(
                    f"Could not find a suitable TLS CA certificate bundle, "
                    f"invalid path: {cert_loc}"
                )

            conn.cert_reqs = "CERT_REQUIRED"

            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = "CERT_NONE"
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise OSError(
                    f"Could not find the TLS certificate file, "
                    f"invalid path: {conn.cert_file}"
                )
            if conn.key_file and not os.path.exists(conn.key_file):
                raise OSError(
                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
                )
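
    # A hedged usage sketch for the ``verify``/``cert`` values that end up in
    # ``cert_verify()`` above. The URL and file paths are illustrative
    # assumptions.
    #
    #     s.get(
    #         "https://internal.example.com/",
    #         verify="/etc/ssl/certs/internal-ca.pem",              # CA bundle path
    #         cert=("/etc/ssl/client.crt", "/etc/ssl/client.key"),  # client cert pair
    #     )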

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, "status", None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode("utf-8")
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through an HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = proxy and scheme != "https"
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith("socks")

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username:
            headers["Proxy-Authorization"] = _basic_auth_str(username, password)

        return headers

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )

        chunked = not (request.body is None or "Content-Length" in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout,
                )

            # Send the request.
            else:
                if hasattr(conn, "proxy_pool"):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    skip_host = "Host" in request.headers
                    low_conn.putrequest(
                        request.method,
                        url,
                        skip_accept_encoding=True,
                        skip_host=skip_host,
                    )

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode("utf-8"))
                        low_conn.send(b"\r\n")
                        low_conn.send(i)
                        low_conn.send(b"\r\n")
                    low_conn.send(b"0\r\n\r\n")

                    # Receive the response from the server
                    r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False,
                    )
                except Exception:
                    # If we hit any problems here, clean up the connection.
                    # Then, raise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, OSError) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            elif isinstance(e, _InvalidHeader):
                raise InvalidHeader(e, request=request)
            else:
                raise

        return self.build_response(request, resp)