Mercurial > repos > guerler > springsuite
comparison planemo/lib/python3.7/site-packages/boto/gs/connection.py @ 0:d30785e31577 draft
"planemo upload commit 6eee67778febed82ddd413c3ca40b3183a3898f1"
author: guerler
date: Fri, 31 Jul 2020 00:18:57 -0400
parents: (none)
children: (none)
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:d30785e31577 |
---|---|
1 # Copyright 2010 Google Inc. | |
2 # | |
3 # Permission is hereby granted, free of charge, to any person obtaining a | |
4 # copy of this software and associated documentation files (the | |
5 # "Software"), to deal in the Software without restriction, including | |
6 # without limitation the rights to use, copy, modify, merge, publish, dis- | |
7 # tribute, sublicense, and/or sell copies of the Software, and to permit | |
8 # persons to whom the Software is furnished to do so, subject to the fol- | |
9 # lowing conditions: | |
10 # | |
11 # The above copyright notice and this permission notice shall be included | |
12 # in all copies or substantial portions of the Software. | |
13 # | |
14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
15 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- | |
16 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | |
17 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |
18 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS | |
20 # IN THE SOFTWARE. | |
21 | |
22 from boto.gs.bucket import Bucket | |
23 from boto.s3.connection import S3Connection | |
24 from boto.s3.connection import SubdomainCallingFormat | |
25 from boto.s3.connection import check_lowercase_bucketname | |
26 from boto.utils import get_utf8_value | |
27 | |
class Location(object):
    """Bucket location constraints accepted by Google Cloud Storage.

    Pass one of these values as the ``location`` argument of
    ``GSConnection.create_bucket`` to control where the bucket's data
    is stored.
    """

    # European Union data centers.
    EU = 'EU'
    # United States data centers (the service default).
    DEFAULT = 'US'
31 | |
class GSConnection(S3Connection):
    """Connection to Google Cloud Storage through its S3-compatible XML API.

    Reuses the S3 connection machinery from boto, substituting the GCS
    endpoint, the ``google`` provider, and the GCS ``Bucket`` class.
    """

    DefaultHost = 'storage.googleapis.com'
    # Template for signed-URL query strings (signature, expiry, access id).
    QueryString = 'Signature=%s&Expires=%d&GoogleAccessId=%s'

    def __init__(self, gs_access_key_id=None, gs_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 proxy_user=None, proxy_pass=None,
                 host=DefaultHost, debug=0, https_connection_factory=None,
                 calling_format=SubdomainCallingFormat(), path='/',
                 suppress_consec_slashes=True):
        # Delegate to S3Connection, pinning the provider to "google" and
        # the bucket class to the GCS-specific Bucket implementation.
        super(GSConnection, self).__init__(gs_access_key_id, gs_secret_access_key,
                is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
                host, debug, https_connection_factory, calling_format, path,
                "google", Bucket,
                suppress_consec_slashes=suppress_consec_slashes)

    def create_bucket(self, bucket_name, headers=None,
                      location=Location.DEFAULT, policy=None,
                      storage_class='STANDARD'):
        """
        Creates a new bucket. By default it's located in the USA. You can
        pass Location.EU to create bucket in the EU. You can also pass
        a LocationConstraint for where the bucket should be located, and
        a StorageClass describing how the data should be stored.

        :type bucket_name: string
        :param bucket_name: The name of the new bucket.

        :type headers: dict
        :param headers: Additional headers to pass along with the request
            to GCS. The supplied dict is not modified.

        :type location: :class:`boto.gs.connection.Location`
        :param location: The location of the new bucket.

        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the new key
            in GCS.

        :type storage_class: string
        :param storage_class: Either 'STANDARD' or 'DURABLE_REDUCED_AVAILABILITY'.

        :rtype: :class:`boto.gs.bucket.Bucket`
        :return: The newly created bucket.

        :raises: ``storage_create_error`` on HTTP 409 (bucket already
            exists / name taken), ``storage_response_error`` on any other
            non-200 status.
        """
        check_lowercase_bucketname(bucket_name)

        if policy:
            if headers:
                # Copy before inserting the ACL header so the caller's
                # dict is never mutated (previously it was modified
                # in place).
                headers = dict(headers)
                headers[self.provider.acl_header] = policy
            else:
                headers = {self.provider.acl_header: policy}
        if not location:
            # Treat a falsy location (None, '') as the service default.
            location = Location.DEFAULT
        location_elem = ('<LocationConstraint>%s</LocationConstraint>'
                         % location)
        if storage_class:
            storage_class_elem = ('<StorageClass>%s</StorageClass>'
                                  % storage_class)
        else:
            storage_class_elem = ''
        # GCS expects both constraints inside one CreateBucketConfiguration
        # document in the PUT body.
        data = ('<CreateBucketConfiguration>%s%s</CreateBucketConfiguration>'
                % (location_elem, storage_class_elem))
        response = self.make_request(
            'PUT', get_utf8_value(bucket_name), headers=headers,
            data=get_utf8_value(data))
        body = response.read()
        if response.status == 409:
            # 409 Conflict: the bucket name is already taken.
            raise self.provider.storage_create_error(
                response.status, response.reason, body)
        if response.status == 200:
            return self.bucket_class(self, bucket_name)
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, body)

    def get_bucket(self, bucket_name, validate=True, headers=None):
        """
        Retrieves a bucket by name.

        If the bucket does not exist, a storage response error will be
        raised. If you are unsure if the bucket exists or not, you can
        use the ``S3Connection.lookup`` method, which will either return
        a valid bucket or ``None``.

        :type bucket_name: string
        :param bucket_name: The name of the bucket

        :type headers: dict
        :param headers: Additional headers to pass along with the request to
            GCS.

        :type validate: boolean
        :param validate: If ``True``, it will try to fetch all keys within the
            given bucket. (Default: ``True``)

        :rtype: :class:`boto.gs.bucket.Bucket`
        :return: The bucket object; only verified to exist when
            ``validate`` is ``True``.
        """
        bucket = self.bucket_class(self, bucket_name)
        if validate:
            # maxkeys=0 keeps the existence probe cheap: the request
            # round-trips but returns no key data.
            bucket.get_all_keys(headers, maxkeys=0)
        return bucket