comparison: lib/python3.8/site-packages/pip/_vendor/requests/models.py @ 0:9e54283cc701 (draft)
"planemo upload commit d12c32a45bcd441307e632fca6d9af7d60289d44"
author: guerler
date: Mon, 27 Jul 2020 03:47:31 -0400
parents: (none)
children: (none)
# -*- coding: utf-8 -*-

"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import datetime
import sys

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/requests/requests/issues/3578.
import encodings.idna

from pip._vendor.urllib3.fields import RequestField
from pip._vendor.urllib3.filepost import encode_multipart_formdata
from pip._vendor.urllib3.util import parse_url
from pip._vendor.urllib3.exceptions import (
    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)

from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict

from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
    ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
    guess_filename, get_auth_from_url, requote_uri,
    stream_decode_response_unicode, to_key_val_list, parse_header_links,
    iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
    Callable, Mapping,
    cookielib, urlunparse, urlsplit, urlencode, str, bytes,
    is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes

#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin(object):
    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

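    # Illustrative examples (editorial sketch, not part of the upstream requests
    # source) of what ``_encode_params`` produces for common inputs, assuming
    # the usual ``application/x-www-form-urlencoded`` rules:
    #
    #   >>> RequestEncodingMixin._encode_params([('k', 'v1'), ('k', 'v2')])
    #   'k=v1&k=v2'
    #   >>> RequestEncodingMixin._encode_params({'a': '1', 'b': ['2', '3']})
    #   'a=1&b=2&b=3'          # dict ordering is not guaranteed
    #   >>> RequestEncodingMixin._encode_params('raw string body')
    #   'raw string body'      # strings and file-like objects pass through untouched
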
    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
118 """ | |
119 if (not files): | |
120 raise ValueError("Files must be provided.") | |
121 elif isinstance(data, basestring): | |
122 raise ValueError("Data must not be a string.") | |
123 | |
124 new_fields = [] | |
125 fields = to_key_val_list(data or {}) | |
126 files = to_key_val_list(files or {}) | |
127 | |
128 for field, val in fields: | |
129 if isinstance(val, basestring) or not hasattr(val, '__iter__'): | |
130 val = [val] | |
131 for v in val: | |
132 if v is not None: | |
133 # Don't call str() on bytestrings: in Py3 it all goes wrong. | |
134 if not isinstance(v, bytes): | |
135 v = str(v) | |
136 | |
137 new_fields.append( | |
138 (field.decode('utf-8') if isinstance(field, bytes) else field, | |
139 v.encode('utf-8') if isinstance(v, str) else v)) | |
140 | |
141 for (k, v) in files: | |
142 # support for explicit filename | |
143 ft = None | |
144 fh = None | |
145 if isinstance(v, (tuple, list)): | |
146 if len(v) == 2: | |
147 fn, fp = v | |
148 elif len(v) == 3: | |
149 fn, fp, ft = v | |
150 else: | |
151 fn, fp, ft, fh = v | |
152 else: | |
153 fn = guess_filename(v) or k | |
154 fp = v | |
155 | |
156 if isinstance(fp, (str, bytes, bytearray)): | |
157 fdata = fp | |
158 elif hasattr(fp, 'read'): | |
159 fdata = fp.read() | |
160 elif fp is None: | |
161 continue | |
162 else: | |
163 fdata = fp | |
164 | |
165 rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) | |
166 rf.make_multipart(content_type=ft) | |
167 new_fields.append(rf) | |
168 | |
169 body, content_type = encode_multipart_formdata(new_fields) | |
170 | |
171 return body, content_type | |
172 | |
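    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source) of the file-value shapes ``_encode_files`` accepts; the field names
    # and file names here are hypothetical:
    #
    #   files = {
    #       'simple': open('report.csv', 'rb'),                             # filename guessed
    #       'named': ('report.csv', open('report.csv', 'rb')),              # 2-tuple
    #       'typed': ('report.csv', open('report.csv', 'rb'), 'text/csv'),  # 3-tuple
    #       'full': ('report.csv', open('report.csv', 'rb'), 'text/csv',
    #                {'X-Extra': 'header'}),                                # 4-tuple
    #   }
    #   body, content_type = RequestEncodingMixin._encode_files(files, {'token': 'abc'})
    #   # content_type -> 'multipart/form-data; boundary=...'
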

class RequestHooksMixin(object):
    def register_hook(self, event, hook):
        """Properly register a hook."""

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False

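    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): hooks are usually supplied when the Request is built and reach
    # ``register_hook`` from there; ``'response'`` is the event defined by
    # default_hooks().
    #
    #   def log_url(r, *args, **kwargs):
    #       print(r.url)
    #
    #   req = Request('GET', 'https://httpbin.org/get', hooks={'response': log_url})
    #   req.deregister_hook('response', log_url)   # -> True
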

class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
                 method=None, url=None, headers=None, files=None, data=None,
                 params=None, auth=None, cookies=None, hooks=None, json=None):

        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p


class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
                method=None, url=None, headers=None, files=None, data=None,
                params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        from pip._vendor import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/requests/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

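    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): ``params`` are merged into any query string already present, and
    # the result is stored on ``self.url``; the URL below is a placeholder.
    #
    #   p = PreparedRequest()
    #   p.prepare_url('https://httpbin.org/get?a=1', {'b': '2'})
    #   p.url   # -> 'https://httpbin.org/get?a=1&b=2'
    #
    # Non-ASCII hostnames are IDNA-encoded via _get_idna_encoded_host(), and
    # non-HTTP schemes such as ``mailto:`` are stored unmodified.
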
    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, Mapping))
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

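    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source) of how the body and Content-Type are chosen; the URL and payload
    # values are examples only.
    #
    #   p = PreparedRequest()
    #   p.prepare(method='POST', url='https://httpbin.org/post', data={'k': 'v'})
    #   p.body                        # -> 'k=v'
    #   p.headers['Content-Type']     # -> 'application/x-www-form-urlencoded'
    #
    #   p = PreparedRequest()
    #   p.prepare(method='POST', url='https://httpbin.org/post', json={'k': 'v'})
    #   p.body                        # -> b'{"k": "v"}'
    #   p.headers['Content-Type']     # -> 'application/json'
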
    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

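    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): a plain dict is converted to a CookieJar, and the resulting header
    # is set once; the URL and cookie values are placeholders.
    #
    #   p = PreparedRequest()
    #   p.prepare(method='GET', url='https://httpbin.org/get',
    #             cookies={'session': 'abc123'})
    #   p.headers['Cookie']   # -> 'session=abc123'
    #
    # Because of the behaviour described in the docstring above, delete the
    # 'Cookie' header first if prepare_cookies() needs to run again.
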
    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
716 """True if this Response one of the permanent versions of redirect.""" | |
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

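    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): typical streaming use, writing a body to disk in 8 KiB chunks.
    # The URL and filename are placeholders.
    #
    #   import requests
    #   r = requests.get('https://example.org/large-file', stream=True)
    #   with open('large-file', 'wb') as fd:
    #       for chunk in r.iter_content(chunk_size=8192):
    #           fd.write(chunk)
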
    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

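    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): for a response carrying a header such as
    #
    #   Link: <https://example.org/items?page=2>; rel="next"
    #
    # the parsed result is keyed by the ``rel`` value:
    #
    #   r.links['next']['url']   # -> 'https://example.org/items?page=2'
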
    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

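    # Illustrative sketch (editorial addition, not part of the upstream requests
    # source): a common calling pattern, converting HTTP error statuses into
    # exceptions; the URL is a placeholder.
    #
    #   import requests
    #   r = requests.get('https://httpbin.org/status/404')
    #   try:
    #       r.raise_for_status()
    #   except requests.exceptions.HTTPError as err:
    #       print(err)   # e.g. "404 Client Error: NOT FOUND for url: ..."
    #
    # A 2xx or 3xx response returns None and raises nothing.
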
    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()