Mercurial > repos > shellac > guppy_basecaller
comparison env/lib/python3.7/site-packages/boto/storage_uri.py @ 0:26e78fe6e8c4 draft
"planemo upload commit c699937486c35866861690329de38ec1a5d9f783"
author | shellac |
---|---|
date | Sat, 02 May 2020 07:14:21 -0400 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:26e78fe6e8c4 |
---|---|
1 # Copyright 2010 Google Inc. | |
2 # Copyright (c) 2011, Nexenta Systems Inc. | |
3 # | |
4 # Permission is hereby granted, free of charge, to any person obtaining a | |
5 # copy of this software and associated documentation files (the | |
6 # "Software"), to deal in the Software without restriction, including | |
7 # without limitation the rights to use, copy, modify, merge, publish, dis- | |
8 # tribute, sublicense, and/or sell copies of the Software, and to permit | |
9 # persons to whom the Software is furnished to do so, subject to the fol- | |
10 # lowing conditions: | |
11 # | |
12 # The above copyright notice and this permission notice shall be included | |
13 # in all copies or substantial portions of the Software. | |
14 # | |
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- | |
17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | |
18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |
19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS | |
21 # IN THE SOFTWARE. | |
22 | |
23 import boto | |
24 import os | |
25 import sys | |
26 import textwrap | |
27 from boto.s3.deletemarker import DeleteMarker | |
28 from boto.exception import BotoClientError | |
29 from boto.exception import InvalidUriError | |
30 | |
31 | |
class StorageUri(object):
    """
    Base class for representing storage provider-independent bucket and
    object name with a shorthand URI-like syntax.

    This is an abstract class: the constructor cannot be called (throws an
    exception if you try).
    """

    connection = None
    # Optional args that can be set from one of the concrete subclass
    # constructors, to change connection behavior (e.g., to override
    # https_connection_factory).
    connection_args = None

    # Map of provider scheme ('s3' or 'gs') to AWSAuthConnection object. We
    # maintain a pool here in addition to the connection pool implemented
    # in AWSAuthConnection because the latter re-creates its connection pool
    # every time that class is instantiated (so the current pool is used to
    # avoid re-instantiating AWSAuthConnection).
    provider_pool = {}

    def __init__(self):
        """Uncallable constructor on abstract base StorageUri class.
        """
        raise BotoClientError('Attempt to instantiate abstract StorageUri '
                              'class')

    def __repr__(self):
        """Returns string representation of URI."""
        return self.uri

    def equals(self, uri):
        """Returns true if two URIs are equal."""
        return self.uri == uri.uri

    def check_response(self, resp, level, uri):
        """Raises InvalidUriError if resp is None.

        @param resp: response object returned by a provider call (or None).
        @param level: noun used in the error message ('bucket', 'key', ...).
        @param uri: URI string to include in the error message.
        """
        if resp is None:
            raise InvalidUriError('\n'.join(textwrap.wrap(
                'Attempt to get %s for "%s" failed. This can happen if '
                'the URI refers to a non-existent object or if you meant to '
                'operate on a directory (e.g., leaving off -R option on gsutil '
                'cp, mv, or ls of a bucket)' % (level, uri), 80)))

    def _check_bucket_uri(self, function_name):
        """Raises InvalidUriError if this bucket URI has no bucket name."""
        if issubclass(type(self), BucketStorageUri) and not self.bucket_name:
            raise InvalidUriError(
                '%s on bucket-less URI (%s)' % (function_name, self.uri))

    def _check_object_uri(self, function_name):
        """Raises InvalidUriError if this bucket URI has no object name."""
        if issubclass(type(self), BucketStorageUri) and not self.object_name:
            raise InvalidUriError('%s on object-less URI (%s)' %
                                  (function_name, self.uri))

    def _warn_about_args(self, function_name, **args):
        """Writes a stderr warning for each truthy (hence ignored) arg."""
        for arg in args:
            if args[arg]:
                sys.stderr.write(
                    'Warning: %s ignores argument: %s=%s\n' %
                    (function_name, arg, str(args[arg])))

    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """
        connection_args = dict(self.connection_args or ())

        if (hasattr(self, 'suppress_consec_slashes') and
                'suppress_consec_slashes' not in connection_args):
            connection_args['suppress_consec_slashes'] = (
                self.suppress_consec_slashes)
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme in self.provider_pool:
                # Reuse the pooled connection for this scheme.
                self.connection = self.provider_pool[self.scheme]
            elif self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                # Use OrdinaryCallingFormat instead of boto-default
                # SubdomainCallingFormat because the latter changes the hostname
                # that's checked during cert validation for HTTPS connections,
                # which will fail cert validation (when cert validation is
                # enabled).
                #
                # The same is not true for S3's HTTPS certificates. In fact,
                # we don't want to do this for S3 because S3 requires the
                # subdomain to match the location of the bucket. If the proper
                # subdomain is not used, the server will return a 301 redirect
                # with no Location header.
                #
                # Note: the following import can't be moved up to the
                # start of this file else it causes a config import failure when
                # run from the resumable upload/download tests.
                from boto.s3.connection import OrdinaryCallingFormat
                connection_args['calling_format'] = OrdinaryCallingFormat()
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection

    def has_version(self):
        """Returns True if this URI carries a version_id or generation."""
        return (issubclass(type(self), BucketStorageUri)
                and ((self.version_id is not None)
                     or (self.generation is not None)))

    def delete_key(self, validate=False, headers=None, version_id=None,
                   mfa_token=None):
        """Deletes the object this URI names."""
        self._check_object_uri('delete_key')
        bucket = self.get_bucket(validate, headers)
        return bucket.delete_key(self.object_name, headers, version_id,
                                 mfa_token)

    def list_bucket(self, prefix='', delimiter='', headers=None,
                    all_versions=False):
        """Lists the bucket's contents.

        With all_versions=True, iterates every object version but filters
        out DeleteMarker entries.
        """
        self._check_bucket_uri('list_bucket')
        bucket = self.get_bucket(headers=headers)
        if all_versions:
            return (v for v in bucket.list_versions(
                prefix=prefix, delimiter=delimiter, headers=headers)
                if not isinstance(v, DeleteMarker))
        else:
            return bucket.list(prefix=prefix, delimiter=delimiter,
                               headers=headers)

    def get_all_keys(self, validate=False, headers=None, prefix=None):
        """Returns all keys in this URI's bucket.

        NOTE(review): the prefix argument is accepted but never forwarded
        to bucket.get_all_keys() — confirm whether callers depend on that
        before changing it.
        """
        bucket = self.get_bucket(validate, headers)
        return bucket.get_all_keys(headers)

    def get_bucket(self, validate=False, headers=None):
        """Returns the Bucket object this URI names."""
        self._check_bucket_uri('get_bucket')
        conn = self.connect()
        bucket = conn.get_bucket(self.bucket_name, validate, headers)
        self.check_response(bucket, 'bucket', self.uri)
        return bucket

    def get_key(self, validate=False, headers=None, version_id=None):
        """Returns the Key object this URI names."""
        self._check_object_uri('get_key')
        bucket = self.get_bucket(validate, headers)
        key = bucket.get_key(self.object_name, headers, version_id)
        self.check_response(key, 'key', self.uri)
        return key

    def new_key(self, validate=False, headers=None):
        """Returns a new (not yet uploaded) Key for this URI's object name."""
        self._check_object_uri('new_key')
        bucket = self.get_bucket(validate, headers)
        return bucket.new_key(self.object_name)

    def get_contents_to_stream(self, fp, headers=None, version_id=None):
        """Downloads the object's contents into the open file object fp."""
        # Bug fix: these two calls previously reported the function name as
        # 'get_key', producing misleading error and warning messages.
        self._check_object_uri('get_contents_to_stream')
        self._warn_about_args('get_contents_to_stream', validate=False)
        key = self.get_key(None, headers)
        self.check_response(key, 'key', self.uri)
        return key.get_contents_to_file(fp, headers, version_id=version_id)

    def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10,
                             torrent=False, version_id=None,
                             res_download_handler=None, response_headers=None,
                             hash_algs=None):
        """Downloads the object's contents into the open file object fp."""
        self._check_object_uri('get_contents_to_file')
        key = self.get_key(None, headers)
        self.check_response(key, 'key', self.uri)
        if hash_algs:
            # hash_algs is only forwarded when provided (presumably not all
            # key implementations accept the kwarg — confirm).
            key.get_contents_to_file(fp, headers, cb, num_cb, torrent,
                                     version_id, res_download_handler,
                                     response_headers,
                                     hash_algs=hash_algs)
        else:
            key.get_contents_to_file(fp, headers, cb, num_cb, torrent,
                                     version_id, res_download_handler,
                                     response_headers)

    def get_contents_as_string(self, validate=False, headers=None, cb=None,
                               num_cb=10, torrent=False, version_id=None):
        """Returns the object's contents as a string."""
        self._check_object_uri('get_contents_as_string')
        key = self.get_key(validate, headers)
        self.check_response(key, 'key', self.uri)
        return key.get_contents_as_string(headers, cb, num_cb, torrent,
                                          version_id)

    def acl_class(self):
        """Returns the provider's ACL class."""
        conn = self.connect()
        acl_class = conn.provider.acl_class
        self.check_response(acl_class, 'acl_class', self.uri)
        return acl_class

    def canned_acls(self):
        """Returns the provider's set of canned ACL names."""
        conn = self.connect()
        canned_acls = conn.provider.canned_acls
        self.check_response(canned_acls, 'canned_acls', self.uri)
        return canned_acls
241 | |
242 | |
class BucketStorageUri(StorageUri):
    """
    StorageUri subclass that handles bucket storage providers.
    Callers should instantiate this class by calling boto.storage_uri().
    """

    # Path delimiter used in bucket object names.
    delim = '/'
    capabilities = set([])  # A set of additional capabilities.
251 | |
    def __init__(self, scheme, bucket_name=None, object_name=None,
                 debug=0, connection_args=None, suppress_consec_slashes=True,
                 version_id=None, generation=None, is_latest=False):
        """Instantiate a BucketStorageUri from scheme,bucket,object tuple.

        @type scheme: string
        @param scheme: URI scheme naming the storage provider (gs, s3, etc.)
        @type bucket_name: string
        @param bucket_name: bucket name
        @type object_name: string
        @param object_name: object name, excluding generation/version.
        @type debug: int
        @param debug: debug level to pass in to connection (range 0..2)
        @type connection_args: map
        @param connection_args: optional map containing args to be
            passed to {S3,GS}Connection constructor (e.g., to override
            https_connection_factory).
        @param suppress_consec_slashes: If provided, controls whether
            consecutive slashes will be suppressed in key paths.
        @param version_id: Object version id (S3-specific).
        @param generation: Object generation number (GCS-specific).
        @param is_latest: boolean indicating that a versioned object is the
            current version

        After instantiation the components are available in the following
        fields: scheme, bucket_name, object_name, version_id, generation,
        is_latest, versionless_uri, version_specific_uri, uri.
        Note: If instantiated without version info, the string representation
        for a URI stays versionless; similarly, if instantiated with version
        info, the string representation for a URI stays version-specific. If you
        call one of the uri.set_contents_from_xyz() methods, a specific object
        version will be created, and its version-specific URI string can be
        retrieved from version_specific_uri even if the URI was instantiated
        without version info.
        """

        self.scheme = scheme
        self.bucket_name = bucket_name
        self.object_name = object_name
        self.debug = debug
        # Only override the class-level connection_args when one was given,
        # so the shared default stays None.
        if connection_args:
            self.connection_args = connection_args
        self.suppress_consec_slashes = suppress_consec_slashes
        self.version_id = version_id
        # Coerce a truthy generation to int; falsy values (None, 0, '')
        # are kept as-is by the short-circuit.
        self.generation = generation and int(generation)
        self.is_latest = is_latest
        self.is_version_specific = bool(self.generation) or bool(version_id)
        self._build_uri_strings()
300 | |
301 def _build_uri_strings(self): | |
302 if self.bucket_name and self.object_name: | |
303 self.versionless_uri = '%s://%s/%s' % (self.scheme, self.bucket_name, | |
304 self.object_name) | |
305 if self.generation: | |
306 self.version_specific_uri = '%s#%s' % (self.versionless_uri, | |
307 self.generation) | |
308 elif self.version_id: | |
309 self.version_specific_uri = '%s#%s' % ( | |
310 self.versionless_uri, self.version_id) | |
311 if self.is_version_specific: | |
312 self.uri = self.version_specific_uri | |
313 else: | |
314 self.uri = self.versionless_uri | |
315 elif self.bucket_name: | |
316 self.uri = ('%s://%s/' % (self.scheme, self.bucket_name)) | |
317 else: | |
318 self.uri = ('%s://' % self.scheme) | |
319 | |
320 def _update_from_key(self, key): | |
321 self._update_from_values( | |
322 getattr(key, 'version_id', None), | |
323 getattr(key, 'generation', None), | |
324 getattr(key, 'is_latest', None), | |
325 getattr(key, 'md5', None)) | |
326 | |
    def _update_from_values(self, version_id, generation, is_latest, md5):
        """Set version metadata fields and rebuild the URI strings."""
        self.version_id = version_id
        self.generation = generation
        self.is_latest = is_latest
        # Rebuild before assigning md5: md5 does not affect the URI string.
        self._build_uri_strings()
        self.md5 = md5
333 | |
    def get_key(self, validate=False, headers=None, version_id=None):
        """Returns the Key object for this URI's object.

        For AWS an explicit version_id argument takes precedence over the
        URI's own version_id; for Google the URI's generation is used.
        """
        self._check_object_uri('get_key')
        bucket = self.get_bucket(validate, headers)
        if self.get_provider().name == 'aws':
            key = bucket.get_key(self.object_name, headers,
                                 version_id=(version_id or self.version_id))
        elif self.get_provider().name == 'google':
            key = bucket.get_key(self.object_name, headers,
                                 generation=self.generation)
        # NOTE(review): if the provider is neither 'aws' nor 'google', `key`
        # is never bound and the next line raises UnboundLocalError —
        # confirm no other providers reach this path.
        self.check_response(key, 'key', self.uri)
        return key
345 | |
    def delete_key(self, validate=False, headers=None, version_id=None,
                   mfa_token=None):
        """Deletes this URI's object (a specific version/generation when
        version info is present).

        NOTE(review): returns None for providers other than 'aws' and
        'google' — confirm no other providers reach this path.
        """
        self._check_object_uri('delete_key')
        bucket = self.get_bucket(validate, headers)
        if self.get_provider().name == 'aws':
            # An explicit version_id argument wins over the URI's own.
            version_id = version_id or self.version_id
            return bucket.delete_key(self.object_name, headers, version_id,
                                     mfa_token)
        elif self.get_provider().name == 'google':
            return bucket.delete_key(self.object_name, headers,
                                     generation=self.generation)
357 | |
358 def clone_replace_name(self, new_name): | |
359 """Instantiate a BucketStorageUri from the current BucketStorageUri, | |
360 but replacing the object_name. | |
361 | |
362 @type new_name: string | |
363 @param new_name: new object name | |
364 """ | |
365 self._check_bucket_uri('clone_replace_name') | |
366 return BucketStorageUri( | |
367 self.scheme, bucket_name=self.bucket_name, object_name=new_name, | |
368 debug=self.debug, | |
369 suppress_consec_slashes=self.suppress_consec_slashes) | |
370 | |
371 def clone_replace_key(self, key): | |
372 """Instantiate a BucketStorageUri from the current BucketStorageUri, by | |
373 replacing the object name with the object name and other metadata found | |
374 in the given Key object (including generation). | |
375 | |
376 @type key: Key | |
377 @param key: key for the new StorageUri to represent | |
378 """ | |
379 self._check_bucket_uri('clone_replace_key') | |
380 version_id = None | |
381 generation = None | |
382 is_latest = False | |
383 if hasattr(key, 'version_id'): | |
384 version_id = key.version_id | |
385 if hasattr(key, 'generation'): | |
386 generation = key.generation | |
387 if hasattr(key, 'is_latest'): | |
388 is_latest = key.is_latest | |
389 | |
390 return BucketStorageUri( | |
391 key.provider.get_provider_name(), | |
392 bucket_name=key.bucket.name, | |
393 object_name=key.name, | |
394 debug=self.debug, | |
395 suppress_consec_slashes=self.suppress_consec_slashes, | |
396 version_id=version_id, | |
397 generation=generation, | |
398 is_latest=is_latest) | |
399 | |
    def get_acl(self, validate=False, headers=None, version_id=None):
        """returns a bucket's acl"""
        self._check_bucket_uri('get_acl')
        bucket = self.get_bucket(validate, headers)
        # This works for both bucket- and object- level ACLs (former passes
        # key_name=None):
        key_name = self.object_name or ''
        if self.get_provider().name == 'aws':
            # S3: an explicit version_id argument wins over the URI's own.
            version_id = version_id or self.version_id
            acl = bucket.get_acl(key_name, headers, version_id)
        else:
            # Non-AWS (GCS) path addresses the ACL by generation instead.
            acl = bucket.get_acl(key_name, headers, generation=self.generation)
        self.check_response(acl, 'acl', self.uri)
        return acl
414 | |
415 def get_def_acl(self, validate=False, headers=None): | |
416 """returns a bucket's default object acl""" | |
417 self._check_bucket_uri('get_def_acl') | |
418 bucket = self.get_bucket(validate, headers) | |
419 acl = bucket.get_def_acl(headers) | |
420 self.check_response(acl, 'acl', self.uri) | |
421 return acl | |
422 | |
423 def get_cors(self, validate=False, headers=None): | |
424 """returns a bucket's CORS XML""" | |
425 self._check_bucket_uri('get_cors') | |
426 bucket = self.get_bucket(validate, headers) | |
427 cors = bucket.get_cors(headers) | |
428 self.check_response(cors, 'cors', self.uri) | |
429 return cors | |
430 | |
431 def set_cors(self, cors, validate=False, headers=None): | |
432 """sets or updates a bucket's CORS XML""" | |
433 self._check_bucket_uri('set_cors ') | |
434 bucket = self.get_bucket(validate, headers) | |
435 if self.scheme == 's3': | |
436 bucket.set_cors(cors, headers) | |
437 else: | |
438 bucket.set_cors(cors.to_xml(), headers) | |
439 | |
440 def get_location(self, validate=False, headers=None): | |
441 self._check_bucket_uri('get_location') | |
442 bucket = self.get_bucket(validate, headers) | |
443 return bucket.get_location(headers) | |
444 | |
445 def get_storage_class(self, validate=False, headers=None): | |
446 self._check_bucket_uri('get_storage_class') | |
447 # StorageClass is defined as a bucket and object param for GCS, but | |
448 # only as a key param for S3. | |
449 if self.scheme != 'gs': | |
450 raise ValueError('get_storage_class() not supported for %s ' | |
451 'URIs.' % self.scheme) | |
452 bucket = self.get_bucket(validate, headers) | |
453 return bucket.get_storage_class(headers) | |
454 | |
455 def set_storage_class(self, storage_class, validate=False, headers=None): | |
456 """Updates a bucket's storage class.""" | |
457 self._check_bucket_uri('set_storage_class') | |
458 # StorageClass is defined as a bucket and object param for GCS, but | |
459 # only as a key param for S3. | |
460 if self.scheme != 'gs': | |
461 raise ValueError('set_storage_class() not supported for %s ' | |
462 'URIs.' % self.scheme) | |
463 bucket = self.get_bucket(validate, headers) | |
464 bucket.set_storage_class(storage_class, headers) | |
465 | |
466 def get_subresource(self, subresource, validate=False, headers=None, | |
467 version_id=None): | |
468 self._check_bucket_uri('get_subresource') | |
469 bucket = self.get_bucket(validate, headers) | |
470 return bucket.get_subresource(subresource, self.object_name, headers, | |
471 version_id) | |
472 | |
    def add_group_email_grant(self, permission, email_address, recursive=False,
                              validate=False, headers=None):
        """Grants permission to a group email address (gs URIs only).

        Applies to this URI's object when one is named, otherwise to the
        bucket (optionally recursing over its keys).
        """
        self._check_bucket_uri('add_group_email_grant')
        if self.scheme != 'gs':
            raise ValueError('add_group_email_grant() not supported for %s '
                             'URIs.' % self.scheme)
        if self.object_name:
            # recursive only makes sense when granting on a bucket.
            if recursive:
                raise ValueError('add_group_email_grant() on key-ful URI cannot '
                                 'specify recursive=True')
            key = self.get_key(validate, headers)
            self.check_response(key, 'key', self.uri)
            key.add_group_email_grant(permission, email_address, headers)
        elif self.bucket_name:
            bucket = self.get_bucket(validate, headers)
            bucket.add_group_email_grant(permission, email_address, recursive,
                                         headers)
        else:
            raise InvalidUriError('add_group_email_grant() on bucket-less URI '
                                  '%s' % self.uri)
493 | |
494 def add_email_grant(self, permission, email_address, recursive=False, | |
495 validate=False, headers=None): | |
496 self._check_bucket_uri('add_email_grant') | |
497 if not self.object_name: | |
498 bucket = self.get_bucket(validate, headers) | |
499 bucket.add_email_grant(permission, email_address, recursive, | |
500 headers) | |
501 else: | |
502 key = self.get_key(validate, headers) | |
503 self.check_response(key, 'key', self.uri) | |
504 key.add_email_grant(permission, email_address) | |
505 | |
506 def add_user_grant(self, permission, user_id, recursive=False, | |
507 validate=False, headers=None): | |
508 self._check_bucket_uri('add_user_grant') | |
509 if not self.object_name: | |
510 bucket = self.get_bucket(validate, headers) | |
511 bucket.add_user_grant(permission, user_id, recursive, headers) | |
512 else: | |
513 key = self.get_key(validate, headers) | |
514 self.check_response(key, 'key', self.uri) | |
515 key.add_user_grant(permission, user_id) | |
516 | |
517 def list_grants(self, headers=None): | |
518 self._check_bucket_uri('list_grants ') | |
519 bucket = self.get_bucket(headers) | |
520 return bucket.list_grants(headers) | |
521 | |
522 def is_file_uri(self): | |
523 """Returns True if this URI names a file or directory.""" | |
524 return False | |
525 | |
526 def is_cloud_uri(self): | |
527 """Returns True if this URI names a bucket or object.""" | |
528 return True | |
529 | |
530 def names_container(self): | |
531 """ | |
532 Returns True if this URI names a directory or bucket. Will return | |
533 False for bucket subdirs; providing bucket subdir semantics needs to | |
534 be done by the caller (like gsutil does). | |
535 """ | |
536 return bool(not self.object_name) | |
537 | |
538 def names_singleton(self): | |
539 """Returns True if this URI names a file or object.""" | |
540 return bool(self.object_name) | |
541 | |
542 def names_directory(self): | |
543 """Returns True if this URI names a directory.""" | |
544 return False | |
545 | |
546 def names_provider(self): | |
547 """Returns True if this URI names a provider.""" | |
548 return bool(not self.bucket_name) | |
549 | |
550 def names_bucket(self): | |
551 """Returns True if this URI names a bucket.""" | |
552 return bool(self.bucket_name) and bool(not self.object_name) | |
553 | |
554 def names_file(self): | |
555 """Returns True if this URI names a file.""" | |
556 return False | |
557 | |
558 def names_object(self): | |
559 """Returns True if this URI names an object.""" | |
560 return self.names_singleton() | |
561 | |
562 def is_stream(self): | |
563 """Returns True if this URI represents input/output stream.""" | |
564 return False | |
565 | |
566 def create_bucket(self, headers=None, location='', policy=None, | |
567 storage_class=None): | |
568 self._check_bucket_uri('create_bucket ') | |
569 conn = self.connect() | |
570 # Pass storage_class param only if this is a GCS bucket. (In S3 the | |
571 # storage class is specified on the key object.) | |
572 if self.scheme == 'gs': | |
573 return conn.create_bucket(self.bucket_name, headers, location, policy, | |
574 storage_class) | |
575 else: | |
576 return conn.create_bucket(self.bucket_name, headers, location, policy) | |
577 | |
578 def delete_bucket(self, headers=None): | |
579 self._check_bucket_uri('delete_bucket') | |
580 conn = self.connect() | |
581 return conn.delete_bucket(self.bucket_name, headers) | |
582 | |
583 def get_all_buckets(self, headers=None): | |
584 conn = self.connect() | |
585 return conn.get_all_buckets(headers) | |
586 | |
587 def get_provider(self): | |
588 conn = self.connect() | |
589 provider = conn.provider | |
590 self.check_response(provider, 'provider', self.uri) | |
591 return provider | |
592 | |
    def set_acl(self, acl_or_str, key_name='', validate=False, headers=None,
                version_id=None, if_generation=None, if_metageneration=None):
        """Sets or updates a bucket's ACL."""
        self._check_bucket_uri('set_acl')
        # Fall back to this URI's object name; '' addresses the bucket ACL.
        key_name = key_name or self.object_name or ''
        bucket = self.get_bucket(validate, headers)
        if self.generation:
            # GCS generation-addressed object: forward the generation
            # preconditions.
            bucket.set_acl(
                acl_or_str, key_name, headers, generation=self.generation,
                if_generation=if_generation, if_metageneration=if_metageneration)
        else:
            # An explicit version_id argument wins over the URI's own.
            version_id = version_id or self.version_id
            bucket.set_acl(acl_or_str, key_name, headers, version_id)
606 | |
    def set_xml_acl(self, xmlstring, key_name='', validate=False, headers=None,
                    version_id=None, if_generation=None, if_metageneration=None):
        """Sets or updates a bucket's ACL with an XML string."""
        self._check_bucket_uri('set_xml_acl')
        # Fall back to this URI's object name; '' addresses the bucket ACL.
        key_name = key_name or self.object_name or ''
        bucket = self.get_bucket(validate, headers)
        if self.generation:
            # GCS generation-addressed object: forward the generation
            # preconditions.
            bucket.set_xml_acl(
                xmlstring, key_name, headers, generation=self.generation,
                if_generation=if_generation, if_metageneration=if_metageneration)
        else:
            # An explicit version_id argument wins over the URI's own.
            version_id = version_id or self.version_id
            bucket.set_xml_acl(xmlstring, key_name, headers,
                               version_id=version_id)
621 | |
622 def set_def_xml_acl(self, xmlstring, validate=False, headers=None): | |
623 """Sets or updates a bucket's default object ACL with an XML string.""" | |
624 self._check_bucket_uri('set_def_xml_acl') | |
625 self.get_bucket(validate, headers).set_def_xml_acl(xmlstring, headers) | |
626 | |
    def set_def_acl(self, acl_or_str, validate=False, headers=None,
                    version_id=None):
        """Sets or updates a bucket's default object ACL."""
        self._check_bucket_uri('set_def_acl')
        # version_id is accepted for signature symmetry but not used here.
        self.get_bucket(validate, headers).set_def_acl(acl_or_str, headers)
632 | |
633 def set_canned_acl(self, acl_str, validate=False, headers=None, | |
634 version_id=None): | |
635 """Sets or updates a bucket's acl to a predefined (canned) value.""" | |
636 self._check_object_uri('set_canned_acl') | |
637 self._warn_about_args('set_canned_acl', version_id=version_id) | |
638 key = self.get_key(validate, headers) | |
639 self.check_response(key, 'key', self.uri) | |
640 key.set_canned_acl(acl_str, headers) | |
641 | |
    def set_def_canned_acl(self, acl_str, validate=False, headers=None,
                           version_id=None):
        """Sets or updates a bucket's default object acl to a predefined
        (canned) value."""
        self._check_bucket_uri('set_def_canned_acl ')
        # NOTE(review): unlike the other set_def_* methods, this goes through
        # a key rather than the bucket — confirm this is intentional.
        key = self.get_key(validate, headers)
        self.check_response(key, 'key', self.uri)
        key.set_def_canned_acl(acl_str, headers, version_id)
650 | |
651 def set_subresource(self, subresource, value, validate=False, headers=None, | |
652 version_id=None): | |
653 self._check_bucket_uri('set_subresource') | |
654 bucket = self.get_bucket(validate, headers) | |
655 bucket.set_subresource(subresource, value, self.object_name, headers, | |
656 version_id) | |
657 | |
    def set_contents_from_string(self, s, headers=None, replace=True,
                                 cb=None, num_cb=10, policy=None, md5=None,
                                 reduced_redundancy=False):
        """Uploads *s* as this URI's object contents and returns the
        underlying key method's result, updating this URI's version
        metadata from the created key."""
        self._check_object_uri('set_contents_from_string')
        key = self.new_key(headers=headers)
        if self.scheme == 'gs':
            # GCS has no reduced_redundancy notion; warn instead of passing
            # the flag through.
            if reduced_redundancy:
                sys.stderr.write('Warning: GCS does not support '
                                 'reduced_redundancy; argument ignored by '
                                 'set_contents_from_string')
            result = key.set_contents_from_string(
                s, headers, replace, cb, num_cb, policy, md5)
        else:
            result = key.set_contents_from_string(
                s, headers, replace, cb, num_cb, policy, md5,
                reduced_redundancy)
        # Pick up the version/generation the service assigned.
        self._update_from_key(key)
        return result
676 | |
    def set_contents_from_file(self, fp, headers=None, replace=True, cb=None,
                               num_cb=10, policy=None, md5=None, size=None,
                               rewind=False, res_upload_handler=None):
        """Uploads the contents of file object *fp* as this URI's object,
        updating this URI's version metadata afterwards."""
        self._check_object_uri('set_contents_from_file')
        key = self.new_key(headers=headers)
        if self.scheme == 'gs':
            result = key.set_contents_from_file(
                fp, headers, replace, cb, num_cb, policy, md5, size=size,
                rewind=rewind, res_upload_handler=res_upload_handler)
            if res_upload_handler:
                # Resumable uploads supply the generation via the handler
                # rather than on the key.
                self._update_from_values(None, res_upload_handler.generation,
                                         None, md5)
        else:
            # res_upload_handler is GCS-only; warn if one was supplied.
            self._warn_about_args('set_contents_from_file',
                                  res_upload_handler=res_upload_handler)
            result = key.set_contents_from_file(
                fp, headers, replace, cb, num_cb, policy, md5, size=size,
                rewind=rewind)
        self._update_from_key(key)
        return result
697 | |
698 def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None, | |
699 policy=None, reduced_redundancy=False): | |
700 self._check_object_uri('set_contents_from_stream') | |
701 dst_key = self.new_key(False, headers) | |
702 result = dst_key.set_contents_from_stream( | |
703 fp, headers, replace, cb, policy=policy, | |
704 reduced_redundancy=reduced_redundancy) | |
705 self._update_from_key(dst_key) | |
706 return result | |
707 | |
    def copy_key(self, src_bucket_name, src_key_name, metadata=None,
                 src_version_id=None, storage_class='STANDARD',
                 preserve_acl=False, encrypt_key=False, headers=None,
                 query_args=None, src_generation=None):
        """Returns newly created key."""
        self._check_object_uri('copy_key')
        dst_bucket = self.get_bucket(validate=False, headers=headers)
        if src_generation:
            # Generation-addressed (GCS) source: src_version_id is not
            # forwarded on this path.
            return dst_bucket.copy_key(
                new_key_name=self.object_name,
                src_bucket_name=src_bucket_name,
                src_key_name=src_key_name, metadata=metadata,
                storage_class=storage_class, preserve_acl=preserve_acl,
                encrypt_key=encrypt_key, headers=headers, query_args=query_args,
                src_generation=src_generation)
        else:
            # Version-addressed (S3) source.
            return dst_bucket.copy_key(
                new_key_name=self.object_name,
                src_bucket_name=src_bucket_name, src_key_name=src_key_name,
                metadata=metadata, src_version_id=src_version_id,
                storage_class=storage_class, preserve_acl=preserve_acl,
                encrypt_key=encrypt_key, headers=headers, query_args=query_args)
730 | |
731 def enable_logging(self, target_bucket, target_prefix=None, validate=False, | |
732 headers=None, version_id=None): | |
733 self._check_bucket_uri('enable_logging') | |
734 bucket = self.get_bucket(validate, headers) | |
735 bucket.enable_logging(target_bucket, target_prefix, headers=headers) | |
736 | |
737 def disable_logging(self, validate=False, headers=None, version_id=None): | |
738 self._check_bucket_uri('disable_logging') | |
739 bucket = self.get_bucket(validate, headers) | |
740 bucket.disable_logging(headers=headers) | |
741 | |
742 def get_logging_config(self, validate=False, headers=None, version_id=None): | |
743 self._check_bucket_uri('get_logging_config') | |
744 bucket = self.get_bucket(validate, headers) | |
745 return bucket.get_logging_config(headers=headers) | |
746 | |
747 def set_website_config(self, main_page_suffix=None, error_key=None, | |
748 validate=False, headers=None): | |
749 self._check_bucket_uri('set_website_config') | |
750 bucket = self.get_bucket(validate, headers) | |
751 if not (main_page_suffix or error_key): | |
752 bucket.delete_website_configuration(headers) | |
753 else: | |
754 bucket.configure_website(main_page_suffix, error_key, headers) | |
755 | |
756 def get_website_config(self, validate=False, headers=None): | |
757 self._check_bucket_uri('get_website_config') | |
758 bucket = self.get_bucket(validate, headers) | |
759 return bucket.get_website_configuration(headers) | |
760 | |
761 def get_versioning_config(self, headers=None): | |
762 self._check_bucket_uri('get_versioning_config') | |
763 bucket = self.get_bucket(False, headers) | |
764 return bucket.get_versioning_status(headers) | |
765 | |
766 def configure_versioning(self, enabled, headers=None): | |
767 self._check_bucket_uri('configure_versioning') | |
768 bucket = self.get_bucket(False, headers) | |
769 return bucket.configure_versioning(enabled, headers) | |
770 | |
771 def set_metadata(self, metadata_plus, metadata_minus, preserve_acl, | |
772 headers=None): | |
773 return self.get_key(False).set_remote_metadata(metadata_plus, | |
774 metadata_minus, | |
775 preserve_acl, | |
776 headers=headers) | |
777 | |
778 def compose(self, components, content_type=None, headers=None): | |
779 self._check_object_uri('compose') | |
780 component_keys = [] | |
781 for suri in components: | |
782 component_keys.append(suri.new_key()) | |
783 component_keys[-1].generation = suri.generation | |
784 self.generation = self.new_key().compose( | |
785 component_keys, content_type=content_type, headers=headers) | |
786 self._build_uri_strings() | |
787 return self | |
788 | |
789 def get_lifecycle_config(self, validate=False, headers=None): | |
790 """Returns a bucket's lifecycle configuration.""" | |
791 self._check_bucket_uri('get_lifecycle_config') | |
792 bucket = self.get_bucket(validate, headers) | |
793 lifecycle_config = bucket.get_lifecycle_config(headers) | |
794 self.check_response(lifecycle_config, 'lifecycle', self.uri) | |
795 return lifecycle_config | |
796 | |
797 def configure_lifecycle(self, lifecycle_config, validate=False, | |
798 headers=None): | |
799 """Sets or updates a bucket's lifecycle configuration.""" | |
800 self._check_bucket_uri('configure_lifecycle') | |
801 bucket = self.get_bucket(validate, headers) | |
802 bucket.configure_lifecycle(lifecycle_config, headers) | |
803 | |
804 def get_billing_config(self, headers=None): | |
805 self._check_bucket_uri('get_billing_config') | |
806 # billing is defined as a bucket param for GCS, but not for S3. | |
807 if self.scheme != 'gs': | |
808 raise ValueError('get_billing_config() not supported for %s ' | |
809 'URIs.' % self.scheme) | |
810 bucket = self.get_bucket(False, headers) | |
811 return bucket.get_billing_config(headers) | |
812 | |
813 def configure_billing(self, requester_pays=False, validate=False, | |
814 headers=None): | |
815 """Sets or updates a bucket's billing configuration.""" | |
816 self._check_bucket_uri('configure_billing') | |
817 # billing is defined as a bucket param for GCS, but not for S3. | |
818 if self.scheme != 'gs': | |
819 raise ValueError('configure_billing() not supported for %s ' | |
820 'URIs.' % self.scheme) | |
821 bucket = self.get_bucket(validate, headers) | |
822 bucket.configure_billing(requester_pays=requester_pays, headers=headers) | |
823 | |
824 def get_encryption_config(self, validate=False, headers=None): | |
825 """Returns a GCS bucket's encryption configuration.""" | |
826 self._check_bucket_uri('get_encryption_config') | |
827 # EncryptionConfiguration is defined as a bucket param for GCS, but not | |
828 # for S3. | |
829 if self.scheme != 'gs': | |
830 raise ValueError('get_encryption_config() not supported for %s ' | |
831 'URIs.' % self.scheme) | |
832 bucket = self.get_bucket(validate, headers) | |
833 return bucket.get_encryption_config(headers=headers) | |
834 | |
835 def set_encryption_config(self, default_kms_key_name=None, validate=False, | |
836 headers=None): | |
837 """Sets a GCS bucket's encryption configuration.""" | |
838 self._check_bucket_uri('set_encryption_config') | |
839 bucket = self.get_bucket(validate, headers) | |
840 bucket.set_encryption_config(default_kms_key_name=default_kms_key_name, | |
841 headers=headers) | |
842 | |
843 def exists(self, headers=None): | |
844 """Returns True if the object exists or False if it doesn't""" | |
845 if not self.object_name: | |
846 raise InvalidUriError('exists on object-less URI (%s)' % self.uri) | |
847 bucket = self.get_bucket(headers) | |
848 key = bucket.get_key(self.object_name, headers=headers) | |
849 return bool(key) | |
850 | |
851 | |
class FileStorageUri(StorageUri):
    """
    StorageUri subclass that handles files in the local file system.
    Callers should instantiate this class by calling boto.storage_uri().

    See file/README about how we map StorageUri operations onto a file system.
    """

    # Path delimiter for the local file system.
    delim = os.sep

    def __init__(self, object_name, debug, is_stream=False):
        """Instantiate a FileStorageUri from a path name.

        @type object_name: string
        @param object_name: object name
        @type debug: boolean
        @param debug: whether to enable debugging on this StorageUri

        After instantiation the components are available in the following
        fields: uri, scheme, bucket_name (always blank for this "anonymous"
        bucket), object_name.
        """
        self.scheme = 'file'
        # File URIs have no bucket; the path alone identifies the object.
        self.bucket_name = ''
        self.object_name = object_name
        self.uri = 'file://' + object_name
        self.debug = debug
        self.stream = is_stream

    def clone_replace_name(self, new_name):
        """Instantiate a FileStorageUri from the current FileStorageUri,
        but replacing the object_name.

        @type new_name: string
        @param new_name: new object name
        """
        return FileStorageUri(new_name, self.debug, self.stream)

    def is_file_uri(self):
        """Returns True if this URI names a file or directory."""
        return True

    def is_cloud_uri(self):
        """Returns True if this URI names a bucket or object."""
        return False

    def names_container(self):
        """Returns True if this URI names a directory or bucket."""
        return self.names_directory()

    def names_singleton(self):
        """Returns True if this URI names a file (or stream) or object."""
        return not self.names_directory()

    def names_directory(self):
        """Returns True if this URI names a directory."""
        # A stream can never be a directory.
        return (not self.stream) and os.path.isdir(self.object_name)

    def names_provider(self):
        """Returns True if this URI names a provider."""
        return False

    def names_bucket(self):
        """Returns True if this URI names a bucket."""
        return False

    def names_file(self):
        """Returns True if this URI names a file."""
        return self.names_singleton()

    def names_object(self):
        """Returns True if this URI names an object."""
        return False

    def is_stream(self):
        """Returns True if this URI represents input/output stream."""
        return bool(self.stream)

    def close(self):
        """Closes the underlying file."""
        self.get_key().close()

    def exists(self, _headers_not_used=None):
        """Returns True if the file exists or False if it doesn't.

        The _headers_not_used parameter is ignored.  It is only there to
        ensure that this method's signature is identical to the exists
        method on the BucketStorageUri class.
        """
        return os.path.exists(self.object_name)