Coverage for /var/srv/projects/api.amasfac.comuna18.com/tmp/venv/lib/python3.9/site-packages/stripe/http_client.py: 20%
351 statements
« prev ^ index » next — coverage.py v6.4.4, created at 2023-07-17 14:22 -0600
1from __future__ import absolute_import, division, print_function
3import sys
4import textwrap
5import warnings
6import email
7import time
8import random
9import threading
10import json
12import stripe
13from stripe import error, util, six
14from stripe.request_metrics import RequestMetrics
16# - Requests is the preferred HTTP library
17# - Google App Engine has urlfetch
18# - Use Pycurl if it's there (at least it verifies SSL certs)
19# - Fall back to urllib2 with a warning if needed
20try:
21 from stripe.six.moves import urllib
22except ImportError:
23 # Try to load in urllib2, but don't sweat it if it's not available.
24 pass
26try:
27 import pycurl
28except ImportError:
29 pycurl = None
try:
    import requests
except ImportError:
    requests = None
else:
    try:
        # Require version 0.8.8, but don't want to depend on distutils
        version = requests.__version__
        major, minor, patch = [int(i) for i in version.split(".")]
    except Exception:
        # Probably some new-fangled version, so it should support verify
        pass
    else:
        # Versions older than 0.8.8 lack SSL `verify` support; disable
        # requests here so new_default_http_client picks another transport.
        if (major, minor, patch) < (0, 8, 8):
            sys.stderr.write(
                "Warning: the Stripe library requires that your Python "
                '"requests" library be newer than version 0.8.8, but your '
                '"requests" library is version %s. Stripe will fall back to '
                "an alternate HTTP library so everything should work. We "
                'recommend upgrading your "requests" library. If you have any '
                "questions, please contact support@stripe.com. (HINT: running "
                '"pip install -U requests" should upgrade your requests '
                "library to the latest version.)" % (version,)
            )
            requests = None
57try:
58 from google.appengine.api import urlfetch
59except ImportError:
60 urlfetch = None
62# proxy support for the pycurl client
63from stripe.six.moves.urllib.parse import urlparse
def _now_ms():
    """Return the current wall-clock time as whole milliseconds."""
    return int(round(1000 * time.time()))
def new_default_http_client(*args, **kwargs):
    """Instantiate the best HTTP client implementation available.

    Preference order: urlfetch (App Engine), then requests, then pycurl,
    and finally the urllib2/urllib fallback (which cannot verify server
    SSL certificates, hence the warning). All arguments are forwarded to
    the chosen client's constructor.
    """
    if urlfetch:
        client_class = UrlFetchClient
    elif requests:
        client_class = RequestsClient
    elif pycurl:
        client_class = PycurlClient
    else:
        warnings.warn(
            "Warning: the Stripe library is falling back to urllib2/urllib "
            "because neither requests nor pycurl are installed. "
            "urllib2's SSL implementation doesn't verify server "
            "certificates. For improved security, we suggest installing "
            "requests."
        )
        client_class = Urllib2Client

    return client_class(*args, **kwargs)
class HTTPClient(object):
    """Abstract base class for Stripe HTTP transports.

    Provides retry orchestration (exponential backoff with jitter,
    honoring the server's Retry-After and stripe-should-retry headers)
    and request telemetry on top of the primitive `request` /
    `request_stream` / `close` operations, which concrete subclasses
    must implement.
    """

    # Backoff delays (seconds) grow from INITIAL_DELAY up to MAX_DELAY;
    # server Retry-After values above MAX_RETRY_AFTER are ignored as
    # unreasonable.
    MAX_DELAY = 2
    INITIAL_DELAY = 0.5
    MAX_RETRY_AFTER = 60

    def __init__(self, verify_ssl_certs=True, proxy=None):
        """Initialize the client.

        :param verify_ssl_certs: when True, verify the server certificate.
        :param proxy: a proxy URL string (applied to both http and https)
            or a dict with "http"/"https" keys mapping to URL strings.
        :raises ValueError: if `proxy` is neither a string nor a dict.
        """
        self._verify_ssl_certs = verify_ssl_certs
        if proxy:
            if isinstance(proxy, str):
                # A bare URL string applies to both schemes.
                proxy = {"http": proxy, "https": proxy}
            if not isinstance(proxy, dict):
                raise ValueError(
                    "Proxy(ies) must be specified as either a string "
                    "URL or a dict() with string URL under the"
                    " "
                    "https"
                    " and/or "
                    "http"
                    " keys."
                )
        # Copy so later mutation by the caller can't change our settings.
        self._proxy = proxy.copy() if proxy else None

        # Per-thread state (e.g. the previous request's telemetry metrics).
        self._thread_local = threading.local()

    def request_with_retries(self, method, url, headers, post_data=None):
        """Perform a buffered request, retrying retryable failures."""
        return self._request_with_retries_internal(
            method, url, headers, post_data, is_streaming=False
        )

    def request_stream_with_retries(
        self, method, url, headers, post_data=None
    ):
        """Perform a streaming request, retrying retryable failures."""
        return self._request_with_retries_internal(
            method, url, headers, post_data, is_streaming=True
        )

    def _request_with_retries_internal(
        self, method, url, headers, post_data, is_streaming
    ):
        """Issue the request, sleeping and retrying per `_should_retry`.

        Returns the subclass's (content, status_code, headers) triple, or
        re-raises the final APIConnectionError once retries are exhausted
        or the error is deemed non-retryable.
        """
        self._add_telemetry_header(headers)

        num_retries = 0

        while True:
            request_start = _now_ms()

            try:
                if is_streaming:
                    response = self.request_stream(
                        method, url, headers, post_data
                    )
                else:
                    response = self.request(method, url, headers, post_data)
                connection_error = None
            except error.APIConnectionError as e:
                # Remember the error; a None response signals "no HTTP
                # response" to the retry logic below.
                connection_error = e
                response = None

            if self._should_retry(response, connection_error, num_retries):
                if connection_error:
                    util.log_info(
                        "Encountered a retryable error %s"
                        % connection_error.user_message
                    )
                num_retries += 1
                sleep_time = self._sleep_time_seconds(num_retries, response)
                util.log_info(
                    (
                        "Initiating retry %i for request %s %s after "
                        "sleeping %.2f seconds."
                        % (num_retries, method, url, sleep_time)
                    )
                )
                time.sleep(sleep_time)
            else:
                if response is not None:
                    self._record_request_metrics(response, request_start)

                    return response
                else:
                    raise connection_error

    def request(self, method, url, headers, post_data=None):
        """Perform one buffered HTTP request. Subclasses must override."""
        raise NotImplementedError(
            "HTTPClient subclasses must implement `request`"
        )

    def request_stream(self, method, url, headers, post_data=None):
        """Perform one streaming HTTP request. Subclasses must override."""
        raise NotImplementedError(
            "HTTPClient subclasses must implement `request_stream`"
        )

    def _should_retry(self, response, api_connection_error, num_retries):
        """Decide whether the attempt just made should be retried."""
        if num_retries >= self._max_network_retries():
            return False

        if response is None:
            # We generally want to retry on timeout and connection
            # exceptions, but defer this decision to underlying subclass
            # implementations. They should evaluate the driver-specific
            # errors worthy of retries, and set flag on the error returned.
            return api_connection_error.should_retry

        _, status_code, rheaders = response

        # The API may ask us not to retry (eg; if doing so would be a no-op)
        # or advise us to retry (eg; in cases of lock timeouts); we defer to that.
        #
        # Note that we expect the headers object to be a CaseInsensitiveDict, as is the case with the requests library.
        if rheaders is not None and "stripe-should-retry" in rheaders:
            if rheaders["stripe-should-retry"] == "false":
                return False
            if rheaders["stripe-should-retry"] == "true":
                return True

        # Retry on conflict errors.
        if status_code == 409:
            return True

        # Retry on 500, 503, and other internal errors.
        #
        # Note that we expect the stripe-should-retry header to be false
        # in most cases when a 500 is returned, since our idempotency framework
        # would typically replay it anyway.
        if status_code >= 500:
            return True

        return False

    def _max_network_retries(self):
        # Imported lazily so tests (and users) can change the module-level
        # setting after this module is imported.
        from stripe import max_network_retries

        # Configured retries, isolated here for tests
        return max_network_retries

    def _retry_after_header(self, response=None):
        """Return the integer Retry-After header value, or None."""
        if response is None:
            return None
        _, _, rheaders = response

        try:
            return int(rheaders["retry-after"])
        except (KeyError, ValueError):
            return None

    def _sleep_time_seconds(self, num_retries, response=None):
        """Compute how long to sleep before retry number `num_retries`."""
        # Apply exponential backoff with initial_network_retry_delay on the
        # number of num_retries so far as inputs.
        # Do not allow the number to exceed max_network_retry_delay.
        sleep_seconds = min(
            HTTPClient.INITIAL_DELAY * (2 ** (num_retries - 1)),
            HTTPClient.MAX_DELAY,
        )

        sleep_seconds = self._add_jitter_time(sleep_seconds)

        # But never sleep less than the base sleep seconds.
        sleep_seconds = max(HTTPClient.INITIAL_DELAY, sleep_seconds)

        # And never sleep less than the time the API asks us to wait, assuming it's a reasonable ask.
        retry_after = self._retry_after_header(response) or 0
        if retry_after <= HTTPClient.MAX_RETRY_AFTER:
            sleep_seconds = max(retry_after, sleep_seconds)

        return sleep_seconds

    def _add_jitter_time(self, sleep_seconds):
        # Randomize the value in [(sleep_seconds/ 2) to (sleep_seconds)]
        # Also separated method here to isolate randomness for tests
        sleep_seconds *= 0.5 * (1 + random.uniform(0, 1))
        return sleep_seconds

    def _add_telemetry_header(self, headers):
        """Attach metrics from this thread's previous request, if any."""
        last_request_metrics = getattr(
            self._thread_local, "last_request_metrics", None
        )
        if stripe.enable_telemetry and last_request_metrics:
            telemetry = {
                "last_request_metrics": last_request_metrics.payload()
            }
            headers["X-Stripe-Client-Telemetry"] = json.dumps(telemetry)

    def _record_request_metrics(self, response, request_start):
        """Store request id/duration for the next telemetry header."""
        _, _, rheaders = response
        if "Request-Id" in rheaders and stripe.enable_telemetry:
            request_id = rheaders["Request-Id"]
            request_duration_ms = _now_ms() - request_start
            self._thread_local.last_request_metrics = RequestMetrics(
                request_id, request_duration_ms
            )

    def close(self):
        """Release underlying resources. Subclasses must override."""
        raise NotImplementedError(
            "HTTPClient subclasses must implement `close`"
        )
class RequestsClient(HTTPClient):
    """HTTPClient implementation backed by the `requests` library.

    Keeps one `requests.Session` per thread so connections are pooled
    without sharing a session object across threads.
    """

    name = "requests"

    def __init__(self, timeout=80, session=None, **kwargs):
        """:param timeout: per-request timeout in seconds (default 80).
        :param session: optional `requests.Session` to use instead of a
            lazily created per-thread session.
        """
        super(RequestsClient, self).__init__(**kwargs)
        self._session = session
        self._timeout = timeout

    def request(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=False
        )

    def request_stream(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=True
        )

    def _request_internal(self, method, url, headers, post_data, is_streaming):
        """Send one request; returns (content, status_code, headers).

        `content` is the raw (unread) response object when streaming,
        otherwise the fully-read body.
        """
        kwargs = {}
        if self._verify_ssl_certs:
            # Verify against the CA bundle shipped with the library.
            kwargs["verify"] = stripe.ca_bundle_path
        else:
            kwargs["verify"] = False

        if self._proxy:
            kwargs["proxies"] = self._proxy

        if is_streaming:
            kwargs["stream"] = True

        # Lazily create one session per thread (or reuse an injected one).
        if getattr(self._thread_local, "session", None) is None:
            self._thread_local.session = self._session or requests.Session()

        try:
            try:
                result = self._thread_local.session.request(
                    method,
                    url,
                    headers=headers,
                    data=post_data,
                    timeout=self._timeout,
                    **kwargs
                )
            except TypeError as e:
                # Most likely an old requests version rejecting one of the
                # keyword arguments above.
                raise TypeError(
                    "Warning: It looks like your installed version of the "
                    '"requests" library is not compatible with Stripe\'s '
                    "usage thereof. (HINT: The most likely cause is that "
                    'your "requests" library is out of date. You can fix '
                    'that by running "pip install -U requests".) The '
                    "underlying error was: %s" % (e,)
                )

            if is_streaming:
                content = result.raw
            else:
                # This causes the content to actually be read, which could cause
                # e.g. a socket timeout. TODO: The other fetch methods probably
                # are susceptible to the same and should be updated.
                content = result.content

            status_code = result.status_code
        except Exception as e:
            # Would catch just requests.exceptions.RequestException, but can
            # also raise ValueError, RuntimeError, etc.
            self._handle_request_error(e)
        return content, status_code, result.headers

    def _handle_request_error(self, e):
        """Translate a low-level failure into APIConnectionError (raises)."""

        # Catch SSL error first as it belongs to ConnectionError,
        # but we don't want to retry
        if isinstance(e, requests.exceptions.SSLError):
            msg = (
                "Could not verify Stripe's SSL certificate. Please make "
                "sure that your network is not intercepting certificates. "
                "If this problem persists, let us know at "
                "support@stripe.com."
            )
            err = "%s: %s" % (type(e).__name__, str(e))
            should_retry = False
        # Retry only timeout and connect errors; similar to urllib3 Retry
        elif isinstance(
            e,
            (requests.exceptions.Timeout, requests.exceptions.ConnectionError),
        ):
            msg = (
                "Unexpected error communicating with Stripe. "
                "If this problem persists, let us know at "
                "support@stripe.com."
            )
            err = "%s: %s" % (type(e).__name__, str(e))
            should_retry = True
        # Catch remaining request exceptions
        elif isinstance(e, requests.exceptions.RequestException):
            msg = (
                "Unexpected error communicating with Stripe. "
                "If this problem persists, let us know at "
                "support@stripe.com."
            )
            err = "%s: %s" % (type(e).__name__, str(e))
            should_retry = False
        else:
            # Anything else (ValueError, RuntimeError, ...) points at a
            # local configuration problem rather than a network blip.
            msg = (
                "Unexpected error communicating with Stripe. "
                "It looks like there's probably a configuration "
                "issue locally. If this problem persists, let us "
                "know at support@stripe.com."
            )
            err = "A %s was raised" % (type(e).__name__,)
            if str(e):
                err += " with error message %s" % (str(e),)
            else:
                err += " with no error message"
            should_retry = False

        msg = textwrap.fill(msg) + "\n\n(Network error: %s)" % (err,)
        raise error.APIConnectionError(msg, should_retry=should_retry)

    def close(self):
        """Close this thread's session, if one was created."""
        if getattr(self._thread_local, "session", None) is not None:
            self._thread_local.session.close()
class UrlFetchClient(HTTPClient):
    """HTTP client backed by Google App Engine's urlfetch service."""

    name = "urlfetch"

    def __init__(self, verify_ssl_certs=True, proxy=None, deadline=55):
        super(UrlFetchClient, self).__init__(
            verify_ssl_certs=verify_ssl_certs, proxy=proxy
        )

        # urlfetch offers no proxy hooks, so refuse early instead of
        # silently ignoring the setting. For a patch, see:
        # https://code.google.com/p/googleappengine/issues/detail?id=544
        if proxy:
            raise ValueError(
                "No proxy support in urlfetch library. "
                "Set stripe.default_http_client to either RequestsClient, "
                "PycurlClient, or Urllib2Client instance to use a proxy."
            )

        self._verify_ssl_certs = verify_ssl_certs
        # GAE aborts requests after 60 seconds, so default to 55 to leave
        # headroom for a slow Stripe response.
        self._deadline = deadline

    def request(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=False
        )

    def request_stream(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=True
        )

    def _request_internal(self, method, url, headers, post_data, is_streaming):
        """Issue one fetch; returns (content, status_code, headers)."""
        try:
            fetched = urlfetch.fetch(
                url=url,
                method=method,
                headers=headers,
                payload=post_data,
                # Google App Engine doesn't let us specify our own cert bundle.
                # However, that's ok because the CA bundle they use recognizes
                # api.stripe.com.
                validate_certificate=self._verify_ssl_certs,
                deadline=self._deadline,
            )
        except urlfetch.Error as exc:
            self._handle_request_error(exc, url)

        # Streaming callers expect a file-like object.
        body = (
            util.io.BytesIO(str.encode(fetched.content))
            if is_streaming
            else fetched.content
        )

        return body, fetched.status_code, fetched.headers

    def _handle_request_error(self, e, url):
        """Convert a urlfetch.Error into an APIConnectionError (raises)."""
        if isinstance(e, urlfetch.InvalidURLError):
            msg = (
                "The Stripe library attempted to fetch an "
                "invalid URL (%r). This is likely due to a bug "
                "in the Stripe Python bindings. Please let us know "
                "at support@stripe.com." % (url,)
            )
        elif isinstance(e, urlfetch.DownloadError):
            msg = "There was a problem retrieving data from Stripe."
        elif isinstance(e, urlfetch.ResponseTooLargeError):
            msg = (
                "There was a problem receiving all of your data from "
                "Stripe. This is likely due to a bug in Stripe. "
                "Please let us know at support@stripe.com."
            )
        else:
            msg = (
                "Unexpected error communicating with Stripe. If this "
                "problem persists, let us know at support@stripe.com."
            )

        msg = textwrap.fill(msg) + "\n\n(Network error: " + str(e) + ")"
        raise error.APIConnectionError(msg)

    def close(self):
        # urlfetch holds no per-client resources.
        pass
class PycurlClient(HTTPClient):
    """HTTPClient implementation backed by pycurl/libcurl.

    A single `pycurl.Curl` handle is kept for the life of the client so
    libcurl can reuse established connections across requests.
    """

    name = "pycurl"

    def __init__(self, verify_ssl_certs=True, proxy=None):
        super(PycurlClient, self).__init__(
            verify_ssl_certs=verify_ssl_certs, proxy=proxy
        )

        # Initialize this within the object so that we can reuse connections.
        self._curl = pycurl.Curl()

        # need to urlparse the proxy, since PyCurl
        # consumes the proxy url in small pieces
        if self._proxy:
            # now that we have the parser, get the proxy url pieces
            proxy = self._proxy
            for scheme, value in six.iteritems(proxy):
                proxy[scheme] = urlparse(value)

    def parse_headers(self, data):
        """Parse a raw response-header blob into a lower-cased-key dict.

        The status line (everything before the first CRLF) is discarded;
        input without any CRLF yields an empty dict.
        """
        if "\r\n" not in data:
            return {}
        raw_headers = data.split("\r\n", 1)[1]
        headers = email.message_from_string(raw_headers)
        return dict((k.lower(), v) for k, v in six.iteritems(dict(headers)))

    def request(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=False
        )

    def request_stream(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=True
        )

    def _request_internal(self, method, url, headers, post_data, is_streaming):
        """Perform one request on the shared curl handle.

        Returns (content, status_code, headers); content is a BytesIO
        when streaming, otherwise a UTF-8-decoded string.
        """
        b = util.io.BytesIO()
        rheaders = util.io.BytesIO()

        # Pycurl's design is a little weird: although we set per-request
        # options on this object, it's also capable of maintaining established
        # connections. Here we call reset() between uses to make sure it's in a
        # pristine state, but notably reset() doesn't reset connections, so we
        # still get to take advantage of those by virtue of re-using the same
        # object.
        self._curl.reset()

        proxy = self._get_proxy(url)
        if proxy:
            if proxy.hostname:
                self._curl.setopt(pycurl.PROXY, proxy.hostname)
            if proxy.port:
                self._curl.setopt(pycurl.PROXYPORT, proxy.port)
            if proxy.username or proxy.password:
                self._curl.setopt(
                    pycurl.PROXYUSERPWD,
                    "%s:%s" % (proxy.username, proxy.password),
                )

        if method == "get":
            self._curl.setopt(pycurl.HTTPGET, 1)
        elif method == "post":
            self._curl.setopt(pycurl.POST, 1)
            self._curl.setopt(pycurl.POSTFIELDS, post_data)
        else:
            self._curl.setopt(pycurl.CUSTOMREQUEST, method.upper())

        # pycurl doesn't like unicode URLs
        self._curl.setopt(pycurl.URL, util.utf8(url))

        self._curl.setopt(pycurl.WRITEFUNCTION, b.write)
        self._curl.setopt(pycurl.HEADERFUNCTION, rheaders.write)
        self._curl.setopt(pycurl.NOSIGNAL, 1)
        self._curl.setopt(pycurl.CONNECTTIMEOUT, 30)
        self._curl.setopt(pycurl.TIMEOUT, 80)
        self._curl.setopt(
            pycurl.HTTPHEADER,
            ["%s: %s" % (k, v) for k, v in six.iteritems(dict(headers))],
        )
        if self._verify_ssl_certs:
            self._curl.setopt(pycurl.CAINFO, stripe.ca_bundle_path)
        else:
            self._curl.setopt(pycurl.SSL_VERIFYHOST, False)

        try:
            self._curl.perform()
        except pycurl.error as e:
            self._handle_request_error(e)

        if is_streaming:
            b.seek(0)
            rcontent = b
        else:
            rcontent = b.getvalue().decode("utf-8")

        rcode = self._curl.getinfo(pycurl.RESPONSE_CODE)
        headers = self.parse_headers(rheaders.getvalue().decode("utf-8"))

        return rcontent, rcode, headers

    def _handle_request_error(self, e):
        """Map a pycurl.error to an APIConnectionError (always raises)."""
        if e.args[0] in [
            pycurl.E_COULDNT_CONNECT,
            pycurl.E_COULDNT_RESOLVE_HOST,
            pycurl.E_OPERATION_TIMEOUTED,
        ]:
            msg = (
                "Could not connect to Stripe. Please check your "
                "internet connection and try again. If this problem "
                "persists, you should check Stripe's service status at "
                "https://twitter.com/stripestatus, or let us know at "
                "support@stripe.com."
            )
            should_retry = True
        elif e.args[0] in [pycurl.E_SSL_CACERT, pycurl.E_SSL_PEER_CERTIFICATE]:
            msg = (
                "Could not verify Stripe's SSL certificate. Please make "
                "sure that your network is not intercepting certificates. "
                "If this problem persists, let us know at "
                "support@stripe.com."
            )
            should_retry = False
        else:
            msg = (
                "Unexpected error communicating with Stripe. If this "
                "problem persists, let us know at support@stripe.com."
            )
            should_retry = False

        msg = textwrap.fill(msg) + "\n\n(Network error: " + e.args[1] + ")"
        raise error.APIConnectionError(msg, should_retry=should_retry)

    def _get_proxy(self, url):
        """Return the parsed proxy for this URL's scheme, or None.

        Falls back from e.g. "https" to "http" (scheme minus its last
        character) when only the latter is configured.
        """
        if self._proxy:
            proxy = self._proxy
            scheme = url.split(":")[0] if url else None
            if scheme:
                return proxy.get(scheme, proxy.get(scheme[0:-1]))
        return None

    def close(self):
        # The curl handle is reset per request; nothing extra to release.
        pass
class Urllib2Client(HTTPClient):
    """Last-resort HTTPClient built on the standard library's urllib.

    Unlike the other transports, this one cannot verify server SSL
    certificates, which is why new_default_http_client warns before
    falling back to it.
    """

    name = "urllib.request"

    def __init__(self, verify_ssl_certs=True, proxy=None):
        super(Urllib2Client, self).__init__(
            verify_ssl_certs=verify_ssl_certs, proxy=proxy
        )
        # Build and cache a proxy-aware opener up front, when configured.
        self._opener = None
        if self._proxy:
            handler = urllib.request.ProxyHandler(self._proxy)
            self._opener = urllib.request.build_opener(handler)

    def request(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=False
        )

    def request_stream(self, method, url, headers, post_data=None):
        return self._request_internal(
            method, url, headers, post_data, is_streaming=True
        )

    def _request_internal(self, method, url, headers, post_data, is_streaming):
        """Issue one request; returns (content, code, lower-cased headers)."""
        if six.PY3 and isinstance(post_data, six.string_types):
            post_data = post_data.encode("utf-8")

        req = urllib.request.Request(url, post_data, headers)

        # urllib only infers GET/POST from the payload; force other verbs.
        if method not in ("get", "post"):
            req.get_method = lambda: method.upper()

        try:
            # Use the cached proxy-tied opener if any; otherwise fall back
            # to the default urllib opener.
            opener = self._opener
            response = (
                opener.open(req) if opener else urllib.request.urlopen(req)
            )

            rcontent = response if is_streaming else response.read()
            rcode = response.code
            headers = dict(response.info())
        except urllib.error.HTTPError as e:
            # HTTP error responses still carry a usable body and headers.
            rcode = e.code
            rcontent = e.read()
            headers = dict(e.info())
        except (urllib.error.URLError, ValueError) as e:
            self._handle_request_error(e)
        lowered = dict(
            (k.lower(), v) for k, v in six.iteritems(dict(headers))
        )
        return rcontent, rcode, lowered

    def _handle_request_error(self, e):
        """Wrap any urllib failure in an APIConnectionError (always raises)."""
        msg = (
            "Unexpected error communicating with Stripe. "
            "If this problem persists, let us know at support@stripe.com."
        )
        msg = textwrap.fill(msg) + "\n\n(Network error: " + str(e) + ")"
        raise error.APIConnectionError(msg)

    def close(self):
        # No persistent resources are held by this client.
        pass