Mercurial > repos > scottx611x > data_manager_fetch_gene_annotation
comparison data_manager/data_manager.py @ 47:3a02b8ab423a draft
"planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_fetch_gene_annotation/ commit 02d2967f77e3fa5a18aea63dc84aa9ab418dc165"
| author | iuc |
|---|---|
| date | Sun, 22 Nov 2020 12:48:13 +0000 |
| parents | 9346d2955707 |
| children |
comparison
equal
deleted
inserted
replaced
| 46:9346d2955707 | 47:3a02b8ab423a |
|---|---|
| 1 # -*- coding: utf-8 -*- | |
| 1 import argparse | 2 import argparse |
| 3 import bz2 | |
| 2 import datetime | 4 import datetime |
| 5 import gzip | |
| 3 import json | 6 import json |
| 4 import os | 7 import os |
| 8 import shutil | |
| 5 import sys | 9 import sys |
| 6 import uuid | 10 import uuid |
| 11 import zipfile | |
| 7 | 12 |
| 8 import requests | 13 |
| 9 from requests.exceptions import ContentDecodingError | 14 # Nice solution to opening compressed files (zip/bz2/gz) transparently |
| 15 # https://stackoverflow.com/a/13045892/638445 | |
| 16 | |
class CompressedFile(object):
    """Base class for transparent access to compressed archives.

    Subclasses declare the leading magic signature plus type metadata and
    implement ``open()`` to return a file-like accessor for the
    decompressed data.
    """
    # Leading signature used to sniff the archive type; compared against
    # data read in binary mode, so subclasses should declare it as bytes.
    magic = None
    file_type = None
    mime_type = None
    proper_extension = None

    def __init__(self, f):
        # f is a filename or an open file-like object; keep a reference
        # and eagerly open the accessor for reading.
        self.f = f
        self.accessor = self.open()

    @classmethod
    def is_magic(cls, data):
        """Return True if *data* starts with this format's signature."""
        # Fixed: the first parameter of a classmethod is the class, so it
        # is named ``cls`` (was misleadingly named ``self``).
        return data.startswith(cls.magic)

    def open(self):
        """Return a file-like accessor; the base class has none."""
        return None
| 34 | |
| 35 | |
class ZIPFile(CompressedFile):
    """Accessor for PK-zip archives."""
    # Must be bytes: the sniff buffer is read in 'rb' mode, and
    # bytes.startswith(str) raises TypeError on Python 3.
    magic = b'\x50\x4b\x03\x04'
    file_type = 'zip'
    mime_type = 'compressed/zip'

    def open(self):
        """Return a file-like handle on the first archive member.

        zipfile.ZipFile itself is not file-like (its ``read()`` takes a
        member name, not a size), so it cannot be passed to
        ``shutil.copyfileobj``; open the first member instead.
        """
        archive = zipfile.ZipFile(self.f)
        return archive.open(archive.namelist()[0])
| 43 | |
| 44 | |
class BZ2File(CompressedFile):
    """Accessor for bzip2-compressed files."""
    # Must be bytes: the sniff buffer is read in 'rb' mode, and
    # bytes.startswith(str) raises TypeError on Python 3.
    magic = b'\x42\x5a\x68'
    file_type = 'bz2'
    mime_type = 'compressed/bz2'

    def open(self):
        """Return a bz2 file object reading the decompressed data."""
        return bz2.BZ2File(self.f)
| 52 | |
| 53 | |
class GZFile(CompressedFile):
    """Accessor for gzip-compressed files."""
    # Must be bytes: the sniff buffer is read in 'rb' mode, and
    # bytes.startswith(str) raises TypeError on Python 3.
    magic = b'\x1f\x8b\x08'
    file_type = 'gz'
    mime_type = 'compressed/gz'

    def open(self):
        """Return a gzip file object reading the decompressed data."""
        return gzip.GzipFile(self.f)
| 61 | |
| 62 | |
# Factory function to create a suitable instance for accessing files.
# Nice solution to opening compressed files (zip/bz2/gz) transparently:
# https://stackoverflow.com/a/13045892/638445
def get_compressed_file(filename):
    """Sniff *filename* and return a matching CompressedFile accessor.

    :param filename: path of the file to inspect
    :returns: an accessor instance, or None if no known magic matches
    """
    # 1024 bytes is more than enough for the longest magic signature.
    # Read-and-close up front: the explicit close()/seek(0) of the old
    # version were redundant because each accessor reopens by filename.
    with open(filename, 'rb') as handle:
        prefix = handle.read(1024)
    for archive_cls in (ZIPFile, BZ2File, GZFile):
        if archive_cls.is_magic(prefix):
            # Pass the filename, not the handle: accessors open it fresh.
            return archive_cls(filename)
    return None
| 74 | |
| 75 | |
| 76 try: | |
| 77 # For Python 3.0 and later | |
| 78 from urllib.request import urlretrieve | |
| 79 except ImportError: | |
| 80 # Fall back to Python 2's urllib2 | |
| 81 from urllib import urlretrieve | |
| 10 | 82 |
| 11 | 83 |
def url_download(url):
    """Attempt to download a gene annotation file from a given url,
    transparently uncompressing zip/bz2/gz payloads.

    :param url: full url to gene annotation file
    :type url: str.
    :returns: name of downloaded (and possibly uncompressed) file
    :raises: IOError
    """
    # Generate file_name from the final URL path segment.
    file_name = url.split('/')[-1]

    try:
        # download URL (FTP and HTTP work, probably local and data too)
        urlretrieve(url, file_name)

        # uncompress file if needed
        cf = get_compressed_file(file_name)
        if cf is not None:
            # Strip the compression suffix (e.g. foo.gtf.gz -> foo.gtf).
            uncompressed_file_name = os.path.splitext(file_name)[0]
            # Binary mode is required: the accessor yields bytes, and
            # writing bytes to a text-mode ('w+') file raises TypeError
            # on Python 3.
            with open(uncompressed_file_name, 'wb') as uncompressed_file:
                shutil.copyfileobj(cf.accessor, uncompressed_file)
            os.remove(file_name)
            file_name = uncompressed_file_name
    except IOError as e:
        sys.stderr.write('Error occured downloading reference file: %s' % e)
        # Best-effort cleanup; the (now removed) name is still returned,
        # preserving the original error-path behaviour for callers.
        os.remove(file_name)
    return file_name
| 38 | 111 |
| 39 | 112 |
| 40 def main(): | 113 def main(): |
| 41 parser = argparse.ArgumentParser(description='Create data manager JSON.') | 114 parser = argparse.ArgumentParser(description='Create data manager JSON.') |
| 42 parser.add_argument('--out', dest='output', action='store', | 115 parser.add_argument('--out', dest='output', action='store', help='JSON filename') |
| 43 help='JSON filename') | 116 parser.add_argument('--name', dest='name', action='store', default=uuid.uuid4(), help='Data table entry unique ID') |
| 44 parser.add_argument('--name', dest='name', action='store', | 117 parser.add_argument('--url', dest='url', action='store', help='Url to download gtf file from') |
| 45 default=uuid.uuid4(), help='Data table entry unique ID' | |
| 46 ) | |
| 47 parser.add_argument('--url', dest='url', action='store', | |
| 48 help='Url to download gtf file from') | |
| 49 | 118 |
| 50 args = parser.parse_args() | 119 args = parser.parse_args() |
| 51 | 120 |
| 52 work_dir = os.getcwd() | 121 work_dir = os.getcwd() |
| 53 | 122 |
| 64 'path': os.path.join(work_dir, gene_annotation_file_name) | 133 'path': os.path.join(work_dir, gene_annotation_file_name) |
| 65 } | 134 } |
| 66 } | 135 } |
| 67 } | 136 } |
| 68 | 137 |
| 69 with open(os.path.join(args.output), "w+") as f: | 138 with open(os.path.join(args.output), 'w+') as fh: |
| 70 f.write(json.dumps(data_manager_entry)) | 139 json.dump(data_manager_entry, fh, sort_keys=True) |
| 71 | 140 |
| 72 | 141 |
| 73 if __name__ == '__main__': | 142 if __name__ == '__main__': |
| 74 main() | 143 main() |
