Mercurial repository: shellac / guppy_basecaller
File: env/lib/python3.7/site-packages/requests/models.py @ changeset 5:9b1c78e6ba9c (draft, default, tip)
Commit message: "planemo upload commit 6c0a8142489327ece472c84e558c47da711a9142"
Author: shellac
Date: Mon, 01 Jun 2020 08:59:25 -0400
Parent: 4:79f47841a781
# -*- coding: utf-8 -*-

"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import datetime
import sys

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna

from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url
from urllib3.exceptions import (
    DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)

from io import UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict

from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
    HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
    ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
    guess_filename, get_auth_from_url, requote_uri,
    stream_decode_response_unicode, to_key_val_list, parse_header_links,
    iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
    Callable, Mapping,
    cookielib, urlunparse, urlsplit, urlencode, str, bytes,
    is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes

#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin(object):
    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

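    # Editor's sketch (not part of upstream requests): the helper above flattens
    # dicts and lists of 2-tuples into a urlencoded string, e.g.
    #
    #   >>> RequestEncodingMixin._encode_params({'a': '1', 'b': ['2', '3']})
    #   'a=1&b=2&b=3'
    #
    # (key order is arbitrary when a plain dict is passed, as the docstring notes).
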
    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, 'read'):
                fdata = fp.read()
            elif fp is None:
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type

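    # Editor's sketch (not part of upstream requests): the entry shapes accepted
    # by _encode_files above, mirroring its docstring:
    #
    #   files = {'field': open('report.csv', 'rb')}                              # bare file object
    #   files = {'field': ('report.csv', open('report.csv', 'rb'))}              # 2-tuple
    #   files = {'field': ('report.csv', open('report.csv', 'rb'), 'text/csv')}  # 3-tuple
    #   files = {'field': ('report.csv', open('report.csv', 'rb'), 'text/csv',
    #                      {'X-My-Header': 'value'})}                            # 4-tuple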

class RequestHooksMixin(object):
    def register_hook(self, event, hook):
        """Properly register a hook."""

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False


class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(self,
                 method=None, url=None, headers=None, files=None, data=None,
                 params=None, auth=None, cookies=None, hooks=None, json=None):

        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p

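# Editor's illustration (not part of upstream requests): the usual flow is to
# build a Request, call prepare(), optionally tweak the resulting
# PreparedRequest, and send it on a Session:
#
#   >>> import requests
#   >>> req = requests.Request('POST', 'https://httpbin.org/post', data={'k': 'v'})
#   >>> prepped = req.prepare()
#   >>> prepped.headers['X-Trace-Id'] = 'abc123'   # hypothetical extra header
#   >>> with requests.Session() as s:
#   ...     resp = s.send(prepped)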

class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(self,
                method=None, url=None, headers=None, files=None, data=None,
                params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return '<PreparedRequest [%s]>' % (self.method)

    def copy(self):
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        import idna

        try:
            host = idna.encode(host, uts46=True).decode('utf-8')
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode('utf8')
        else:
            url = unicode(url) if is_py2 else str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ':' in url and not url.lower().startswith('http'):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

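    # Editor's sketch (not part of upstream requests): prepare_url above merges
    # `params` into any existing query string and percent-encodes the result, e.g.
    #
    #   >>> p = PreparedRequest()
    #   >>> p.prepare_url('https://httpbin.org/get?x=1', {'y': 'ü'})
    #   >>> p.url
    #   'https://httpbin.org/get?x=1&y=%C3%BC'
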
    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, Mapping))
        ])

        try:
            length = super_len(data)
        except (TypeError, AttributeError, UnsupportedOperation):
            length = None

        if is_stream:
            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body

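    # Editor's note (not part of upstream requests): in prepare_body above,
    # `json` is only used when `data` is falsy, so passing both quietly prefers
    # `data`. A minimal sketch of the json path:
    #
    #   >>> p = PreparedRequest()
    #   >>> p.prepare_headers(None)
    #   >>> p.prepare_body(data=None, files=None, json={'k': 'v'})
    #   >>> p.headers['Content-Type'], p.body
    #   ('application/json', b'{"k": "v"}')
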
    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

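    # Editor's note (not part of upstream requests): per the docstring of
    # prepare_cookies above, regenerating the Cookie header on an already
    # prepared request requires dropping the existing header first:
    #
    #   >>> prepped.headers.pop('Cookie', None)    # `prepped` is a hypothetical PreparedRequest
    #   >>> prepped.prepare_cookies({'session': 'abc123'})
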
    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def is_permanent_redirect(self):
        """True if this Response is one of the permanent versions of redirect."""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library."""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

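    # Editor's illustration (not part of upstream requests): a typical streaming
    # download with iter_content, keeping memory use bounded to roughly
    # chunk_size bytes at a time:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/bytes/1024', stream=True)
    #   >>> with open('payload.bin', 'wb') as fd:   # hypothetical output file
    #   ...     for chunk in r.iter_content(chunk_size=8192):
    #   ...         fd.write(chunk)
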
    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):

            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

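    # Editor's illustration (not part of upstream requests): iterating a streamed
    # response line by line, e.g. for a newline-delimited API:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/stream/5', stream=True)
    #   >>> for line in r.iter_lines(decode_unicode=True):
    #   ...     if line:   # filter out keep-alive empty lines
    #   ...         print(line)
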
    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)

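    # Editor's illustration (not part of upstream requests): json() above raises
    # ValueError when the body is not valid JSON, so callers typically guard it:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/json')
    #   >>> try:
    #   ...     payload = r.json()
    #   ... except ValueError:
    #   ...     payload = None
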
    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

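    # Editor's illustration (not part of upstream requests): for APIs that
    # paginate with RFC 5988 Link headers (e.g. GitHub), links is keyed by the
    # "rel" value of each link:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://api.github.com/repos/psf/requests/issues')
    #   >>> next_page = r.links.get('next', {}).get('url')   # None-safe lookup
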
    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

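    # Editor's illustration (not part of upstream requests): raise_for_status
    # above turns 4xx/5xx responses into exceptions, which keeps error handling
    # in one place:
    #
    #   >>> import requests
    #   >>> r = requests.get('https://httpbin.org/status/404')
    #   >>> try:
    #   ...     r.raise_for_status()
    #   ... except requests.HTTPError as exc:
    #   ...     print('request failed:', exc)
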
    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()