Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/urllib3/connectionpool.py: 15%

336 statements  

« prev     ^ index     » next       coverage.py v6.4.4, created at 2023-07-17 14:22 -0600

1from __future__ import absolute_import 

2 

3import errno 

4import logging 

5import re 

6import socket 

7import sys 

8import warnings 

9from socket import error as SocketError 

10from socket import timeout as SocketTimeout 

11 

12from .connection import ( 

13 BaseSSLError, 

14 BrokenPipeError, 

15 DummyConnection, 

16 HTTPConnection, 

17 HTTPException, 

18 HTTPSConnection, 

19 VerifiedHTTPSConnection, 

20 port_by_scheme, 

21) 

22from .exceptions import ( 

23 ClosedPoolError, 

24 EmptyPoolError, 

25 HeaderParsingError, 

26 HostChangedError, 

27 InsecureRequestWarning, 

28 LocationValueError, 

29 MaxRetryError, 

30 NewConnectionError, 

31 ProtocolError, 

32 ProxyError, 

33 ReadTimeoutError, 

34 SSLError, 

35 TimeoutError, 

36) 

37from .packages import six 

38from .packages.six.moves import queue 

39from .request import RequestMethods 

40from .response import HTTPResponse 

41from .util.connection import is_connection_dropped 

42from .util.proxy import connection_requires_http_tunnel 

43from .util.queue import LifoQueue 

44from .util.request import set_file_position 

45from .util.response import assert_header_parsing 

46from .util.retry import Retry 

47from .util.ssl_match_hostname import CertificateError 

48from .util.timeout import Timeout 

49from .util.url import Url, _encode_target 

50from .util.url import _normalize_host as normalize_host 

51from .util.url import get_host, parse_url 

52 

# Py2/Py3-compatible lazy range; used to pre-fill the pool queue in __init__.
xrange = six.moves.xrange

log = logging.getLogger(__name__)

# Sentinel distinguishing "argument not supplied" from an explicit ``None``
# (None is a meaningful timeout value, so it cannot serve as the default).
_Default = object()

58 

59 

60# Pool objects 

# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
    """

    # Subclasses set these ("http"/"https" and the queue implementation).
    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # Keep the raw lowercased hostname for proxy tunneling; self.host is
        # the scheme-normalized form.
        self._proxy_host = host.lower()
        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

    def __str__(self):
        cls_name = type(self).__name__
        return "%s(host=%r, port=%r)" % (cls_name, self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Returning False lets any in-flight exception propagate.
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """

99 

100 

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errno values raised by a non-blocking socket with no data ready; they are
# translated into ReadTimeoutError by HTTPConnectionPool._raise_timeout().
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}

103 

104 

class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`http.client.HTTPConnection`.

    :param strict:
        Causes BadStatusLine to be raised if the status line can't be parsed
        as a valid HTTP/1.0 or 1.1 status line, passed into
        :class:`http.client.HTTPConnection`.

        .. note::
           Only works in Python 2. This parameter is ignored in Python 3.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = "http"
    # Implementation classes for connections/responses; the HTTPS subclass
    # overrides ConnectionCls.
    ConnectionCls = HTTPConnection
    ResponseCls = HTTPResponse

168 

169 def __init__( 

170 self, 

171 host, 

172 port=None, 

173 strict=False, 

174 timeout=Timeout.DEFAULT_TIMEOUT, 

175 maxsize=1, 

176 block=False, 

177 headers=None, 

178 retries=None, 

179 _proxy=None, 

180 _proxy_headers=None, 

181 _proxy_config=None, 

182 **conn_kw 

183 ): 

184 ConnectionPool.__init__(self, host, port) 

185 RequestMethods.__init__(self, headers) 

186 

187 self.strict = strict 

188 

189 if not isinstance(timeout, Timeout): 

190 timeout = Timeout.from_float(timeout) 

191 

192 if retries is None: 

193 retries = Retry.DEFAULT 

194 

195 self.timeout = timeout 

196 self.retries = retries 

197 

198 self.pool = self.QueueCls(maxsize) 

199 self.block = block 

200 

201 self.proxy = _proxy 

202 self.proxy_headers = _proxy_headers or {} 

203 self.proxy_config = _proxy_config 

204 

205 # Fill the queue up so that doing get() on it will block properly 

206 for _ in xrange(maxsize): 

207 self.pool.put(None) 

208 

209 # These are mostly for testing and debugging purposes. 

210 self.num_connections = 0 

211 self.num_requests = 0 

212 self.conn_kw = conn_kw 

213 

214 if self.proxy: 

215 # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. 

216 # We cannot know if the user has added default socket options, so we cannot replace the 

217 # list. 

218 self.conn_kw.setdefault("socket_options", []) 

219 

220 self.conn_kw["proxy"] = self.proxy 

221 self.conn_kw["proxy_config"] = self.proxy_config 

222 

223 def _new_conn(self): 

224 """ 

225 Return a fresh :class:`HTTPConnection`. 

226 """ 

227 self.num_connections += 1 

228 log.debug( 

229 "Starting new HTTP connection (%d): %s:%s", 

230 self.num_connections, 

231 self.host, 

232 self.port or "80", 

233 ) 

234 

235 conn = self.ConnectionCls( 

236 host=self.host, 

237 port=self.port, 

238 timeout=self.timeout.connect_timeout, 

239 strict=self.strict, 

240 **self.conn_kw 

241 ) 

242 return conn 

243 

244 def _get_conn(self, timeout=None): 

245 """ 

246 Get a connection. Will return a pooled connection if one is available. 

247 

248 If no connections are available and :prop:`.block` is ``False``, then a 

249 fresh connection is returned. 

250 

251 :param timeout: 

252 Seconds to wait before giving up and raising 

253 :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and 

254 :prop:`.block` is ``True``. 

255 """ 

256 conn = None 

257 try: 

258 conn = self.pool.get(block=self.block, timeout=timeout) 

259 

260 except AttributeError: # self.pool is None 

261 raise ClosedPoolError(self, "Pool is closed.") 

262 

263 except queue.Empty: 

264 if self.block: 

265 raise EmptyPoolError( 

266 self, 

267 "Pool reached maximum size and no more connections are allowed.", 

268 ) 

269 pass # Oh well, we'll create a new connection then 

270 

271 # If this is a persistent connection, check if it got disconnected 

272 if conn and is_connection_dropped(conn): 

273 log.debug("Resetting dropped connection: %s", self.host) 

274 conn.close() 

275 if getattr(conn, "auto_open", 1) == 0: 

276 # This is a proxied connection that has been mutated by 

277 # http.client._tunnel() and cannot be reused (since it would 

278 # attempt to bypass the proxy) 

279 conn = None 

280 

281 return conn or self._new_conn() 

282 

283 def _put_conn(self, conn): 

284 """ 

285 Put a connection back into the pool. 

286 

287 :param conn: 

288 Connection object for the current host and port as returned by 

289 :meth:`._new_conn` or :meth:`._get_conn`. 

290 

291 If the pool is already full, the connection is closed and discarded 

292 because we exceeded maxsize. If connections are discarded frequently, 

293 then maxsize should be increased. 

294 

295 If the pool is closed, then the connection will be closed and discarded. 

296 """ 

297 try: 

298 self.pool.put(conn, block=False) 

299 return # Everything is dandy, done. 

300 except AttributeError: 

301 # self.pool is None. 

302 pass 

303 except queue.Full: 

304 # This should never happen if self.block == True 

305 log.warning( 

306 "Connection pool is full, discarding connection: %s. Connection pool size: %s", 

307 self.host, 

308 self.pool.qsize(), 

309 ) 

310 # Connection never got put back into the pool, close it. 

311 if conn: 

312 conn.close() 

313 

314 def _validate_conn(self, conn): 

315 """ 

316 Called right before a request is made, after the socket is created. 

317 """ 

318 pass 

319 

320 def _prepare_proxy(self, conn): 

321 # Nothing to do for HTTP connections. 

322 pass 

323 

324 def _get_timeout(self, timeout): 

325 """Helper that always returns a :class:`urllib3.util.Timeout`""" 

326 if timeout is _Default: 

327 return self.timeout.clone() 

328 

329 if isinstance(timeout, Timeout): 

330 return timeout.clone() 

331 else: 

332 # User passed us an int/float. This is for backwards compatibility, 

333 # can be removed later 

334 return Timeout.from_float(timeout) 

335 

336 def _raise_timeout(self, err, url, timeout_value): 

337 """Is the error actually a timeout? Will raise a ReadTimeout or pass""" 

338 

339 if isinstance(err, SocketTimeout): 

340 raise ReadTimeoutError( 

341 self, url, "Read timed out. (read timeout=%s)" % timeout_value 

342 ) 

343 

344 # See the above comment about EAGAIN in Python 3. In Python 2 we have 

345 # to specifically catch it and throw the timeout error 

346 if hasattr(err, "errno") and err.errno in _blocking_errnos: 

347 raise ReadTimeoutError( 

348 self, url, "Read timed out. (read timeout=%s)" % timeout_value 

349 ) 

350 

351 # Catch possible read timeouts thrown as SSL errors. If not the 

352 # case, rethrow the original. We need to do this because of: 

353 # http://bugs.python.org/issue10272 

354 if "timed out" in str(err) or "did not complete (read)" in str( 

355 err 

356 ): # Python < 2.7.4 

357 raise ReadTimeoutError( 

358 self, url, "Read timed out. (read timeout=%s)" % timeout_value 

359 ) 

360 

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (e.g. GET, POST).

        :param url:
            Target of the request, passed through to ``conn.request()``.

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.

        :param chunked:
            If True, send the body via ``conn.request_chunked`` using chunked
            transfer encoding.
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            if chunked:
                conn.request_chunked(method, url, **httplib_request_kw)
            else:
                conn.request(method, url, **httplib_request_kw)

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            # Python 3
            pass
        except IOError as e:
            # Python 2 and macOS/Linux
            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno not in {
                errno.EPIPE,
                errno.ESHUTDOWN,
                errno.EPROTOTYPE,
            }:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3 (getresponse() takes no ``buffering`` argument)
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            http_version,
            httplib_response.status,
            httplib_response.length,
        )

        # Malformed headers are logged but do not fail the request.
        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
            log.warning(
                "Failed to parse headers (url=%s): %s",
                self._absolute_url(url),
                hpe,
                exc_info=True,
            )

        return httplib_response

479 

480 def _absolute_url(self, path): 

481 return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url 

482 

483 def close(self): 

484 """ 

485 Close all pooled connections and disable the pool. 

486 """ 

487 if self.pool is None: 

488 return 

489 # Disable access to the pool 

490 old_pool, self.pool = self.pool, None 

491 

492 try: 

493 while True: 

494 conn = old_pool.get(block=False) 

495 if conn: 

496 conn.close() 

497 

498 except queue.Empty: 

499 pass # Done. 

500 

501 def is_same_host(self, url): 

502 """ 

503 Check if the given ``url`` is a member of the same host as this 

504 connection pool. 

505 """ 

506 if url.startswith("/"): 

507 return True 

508 

509 # TODO: Add optional support for socket.gethostbyname checking. 

510 scheme, host, port = get_host(url) 

511 if host is not None: 

512 host = _normalize_host(host, scheme=scheme) 

513 

514 # Use explicit default port for comparison when none is given 

515 if self.port and not port: 

516 port = port_by_scheme.get(scheme) 

517 elif not self.port and port == port_by_scheme.get(scheme): 

518 port = None 

519 

520 return (scheme, host, port) == (self.scheme, self.host, self.port) 

521 

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """

        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn and http_tunnel_required:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Pass method to Response for length checking
            response_kw["request_method"] = method

            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False

            def _is_ssl_error_message_from_http_proxy(ssl_error):
                # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
                # SSLErrors are kinda all over the place when it comes to the message,
                # so we try to cover our bases here!
                message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
                return (
                    "wrong version number" in message or "unknown protocol" in message
                )

            # Try to detect a common user error with proxies which is to
            # set an HTTP proxy to be HTTPS when it should be 'http://'
            # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
            # Instead we add a nice error message and point to a URL.
            if (
                isinstance(e, BaseSSLError)
                and self.proxy
                and _is_ssl_error_message_from_http_proxy(e)
                and conn.proxy
                and conn.proxy.scheme == "https"
            ):
                e = ProxyError(
                    "Your proxy appears to only use HTTP and not HTTPS, "
                    "try changing your proxy URL to be HTTP. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#https-proxy-error-http-proxy",
                    SSLError(e),
                )
            elif isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)

            # Raises MaxRetryError once the retry budget is exhausted.
            retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                conn = conn and conn.close()
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # A 303 response tells the client to repeat the request as GET.
                method = "GET"

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        return response

895 

896 

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    # SSL-capable connection class; _new_conn() rejects DummyConnection,
    # which is what this name resolves to when the ssl module is unavailable.
    ConnectionCls = HTTPSConnection

913 

914 def __init__( 

915 self, 

916 host, 

917 port=None, 

918 strict=False, 

919 timeout=Timeout.DEFAULT_TIMEOUT, 

920 maxsize=1, 

921 block=False, 

922 headers=None, 

923 retries=None, 

924 _proxy=None, 

925 _proxy_headers=None, 

926 key_file=None, 

927 cert_file=None, 

928 cert_reqs=None, 

929 key_password=None, 

930 ca_certs=None, 

931 ssl_version=None, 

932 assert_hostname=None, 

933 assert_fingerprint=None, 

934 ca_cert_dir=None, 

935 **conn_kw 

936 ): 

937 

938 HTTPConnectionPool.__init__( 

939 self, 

940 host, 

941 port, 

942 strict, 

943 timeout, 

944 maxsize, 

945 block, 

946 headers, 

947 retries, 

948 _proxy, 

949 _proxy_headers, 

950 **conn_kw 

951 ) 

952 

953 self.key_file = key_file 

954 self.cert_file = cert_file 

955 self.cert_reqs = cert_reqs 

956 self.key_password = key_password 

957 self.ca_certs = ca_certs 

958 self.ca_cert_dir = ca_cert_dir 

959 self.ssl_version = ssl_version 

960 self.assert_hostname = assert_hostname 

961 self.assert_fingerprint = assert_fingerprint 

962 

963 def _prepare_conn(self, conn): 

964 """ 

965 Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` 

966 and establish the tunnel if proxy is used. 

967 """ 

968 

969 if isinstance(conn, VerifiedHTTPSConnection): 

970 conn.set_cert( 

971 key_file=self.key_file, 

972 key_password=self.key_password, 

973 cert_file=self.cert_file, 

974 cert_reqs=self.cert_reqs, 

975 ca_certs=self.ca_certs, 

976 ca_cert_dir=self.ca_cert_dir, 

977 assert_hostname=self.assert_hostname, 

978 assert_fingerprint=self.assert_fingerprint, 

979 ) 

980 conn.ssl_version = self.ssl_version 

981 return conn 

982 

983 def _prepare_proxy(self, conn): 

984 """ 

985 Establishes a tunnel connection through HTTP CONNECT. 

986 

987 Tunnel connection is established early because otherwise httplib would 

988 improperly set Host: header to proxy's IP:port. 

989 """ 

990 

991 conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) 

992 

993 if self.proxy.scheme == "https": 

994 conn.tls_in_tls_required = True 

995 

996 conn.connect() 

997 

998 def _new_conn(self): 

999 """ 

1000 Return a fresh :class:`http.client.HTTPSConnection`. 

1001 """ 

1002 self.num_connections += 1 

1003 log.debug( 

1004 "Starting new HTTPS connection (%d): %s:%s", 

1005 self.num_connections, 

1006 self.host, 

1007 self.port or "443", 

1008 ) 

1009 

1010 if not self.ConnectionCls or self.ConnectionCls is DummyConnection: 

1011 raise SSLError( 

1012 "Can't connect to HTTPS URL because the SSL module is not available." 

1013 ) 

1014 

1015 actual_host = self.host 

1016 actual_port = self.port 

1017 if self.proxy is not None: 

1018 actual_host = self.proxy.host 

1019 actual_port = self.proxy.port 

1020 

1021 conn = self.ConnectionCls( 

1022 host=actual_host, 

1023 port=actual_port, 

1024 timeout=self.timeout.connect_timeout, 

1025 strict=self.strict, 

1026 cert_file=self.cert_file, 

1027 key_file=self.key_file, 

1028 key_password=self.key_password, 

1029 **self.conn_kw 

1030 ) 

1031 

1032 return self._prepare_conn(conn) 

1033 

1034 def _validate_conn(self, conn): 

1035 """ 

1036 Called right before a request is made, after the socket is created. 

1037 """ 

1038 super(HTTPSConnectionPool, self)._validate_conn(conn) 

1039 

1040 # Force connect early to allow us to validate the connection. 

1041 if not getattr(conn, "sock", None): # AppEngine might not have `.sock` 

1042 conn.connect() 

1043 

1044 if not conn.is_verified: 

1045 warnings.warn( 

1046 ( 

1047 "Unverified HTTPS request is being made to host '%s'. " 

1048 "Adding certificate verification is strongly advised. See: " 

1049 "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" 

1050 "#ssl-warnings" % conn.host 

1051 ), 

1052 InsecureRequestWarning, 

1053 ) 

1054 

1055 if getattr(conn, "proxy_is_verified", None) is False: 

1056 warnings.warn( 

1057 ( 

1058 "Unverified HTTPS connection done to an HTTPS proxy. " 

1059 "Adding certificate verification is strongly advised. See: " 

1060 "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" 

1061 "#ssl-warnings" 

1062 ), 

1063 InsecureRequestWarning, 

1064 ) 

1065 

1066 

def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    # Fall back to the scheme's well-known port (80 for unknown schemes).
    if not port:
        port = port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)

1093 

1094 

def _normalize_host(host, scheme):
    """
    Normalize hosts for comparisons and use with sockets.
    """
    normalized = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses:
    # if brackets are present together with a port it doubles them up on the
    # Host header. Strip them here — but for backward compatibility we can't
    # assert that the port is non-None. See http://bugs.python.org/issue28539
    if normalized.startswith("[") and normalized.endswith("]"):
        return normalized[1:-1]
    return normalized