Mercurial > repos > iuc > data_manager_fetch_busco
diff data_manager/data_manager.py @ 2:2b4526fdf7fb draft default tip
planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_fetch_busco/ commit 2d2c72dc464b938bfa4def2511ce0938f3a1ea7d
author | iuc |
---|---|
date | Mon, 24 Apr 2023 12:26:46 +0000 |
parents | 15b97817550a |
children |
line wrap: on
line diff
--- a/data_manager/data_manager.py	Sun Nov 22 12:47:50 2020 +0000
+++ b/data_manager/data_manager.py	Mon Apr 24 12:26:46 2023 +0000
@@ -1,71 +1,47 @@
+#!/usr/bin/env python
+#
+# Data manager for reference data for the 'BUSCO' Galaxy tools
 import argparse
 import datetime
 import json
 import os
 import shutil
-import tarfile
-import zipfile
-try:
-    # For Python 3.0 and later
-    from urllib.request import Request, urlopen
-except ImportError:
-    # Fall back to Python 2 imports
-    from urllib2 import Request, urlopen
-
-
-def url_download(url, workdir):
-    file_path = os.path.join(workdir, 'download.dat')
-    if not os.path.exists(workdir):
-        os.makedirs(workdir)
-    src = None
-    dst = None
-    try:
-        req = Request(url)
-        src = urlopen(req)
-        with open(file_path, 'wb') as dst:
-            while True:
-                chunk = src.read(2**10)
-                if chunk:
-                    dst.write(chunk)
-                else:
-                    break
-    finally:
-        if src:
-            src.close()
-    if tarfile.is_tarfile(file_path):
-        fh = tarfile.open(file_path, 'r:*')
-    elif zipfile.is_zipfile(file_path):
-        fh = zipfile.ZipFile(file_path, 'r')
-    else:
-        return
-    fh.extractall(workdir)
-    os.remove(file_path)
+import subprocess
+from pathlib import Path
 
 
 def main(args):
-    workdir = os.path.join(os.getcwd(), 'busco')
-    url_download(args.url, workdir)
+    workdir = os.path.join(os.getcwd(), "busco_downloads")
+    cmd = "busco --download %s" % args.database
+    subprocess.check_call(cmd, shell=True)
+    with open(args.json) as fh:
+        params = json.load(fh)
+    target_directory = params["output_data"][0]["extra_files_path"]
     data_manager_entry = {}
-    data_manager_entry['value'] = args.name.lower()
-    data_manager_entry['name'] = args.name
-    data_manager_entry['path'] = '.'
-    data_manager_json = dict(data_tables=dict(busco=data_manager_entry))
-    with open(args.output) as fh:
-        params = json.load(fh)
-    target_directory = params['output_data'][0]['extra_files_path']
+    data_manager_entry["value"] = args.name.lower()
+    data_manager_entry["name"] = args.name
+    data_manager_entry["version"] = args.version
+    data_manager_entry["path"] = str(Path(target_directory))
+    data_manager_json = dict(data_tables=dict(busco_database=data_manager_entry))
+    os.mkdir(target_directory)
-    output_path = os.path.abspath(os.path.join(os.getcwd(), 'busco'))
+    output_path = os.path.abspath(os.path.join(os.getcwd(), "busco_downloads"))
     for filename in os.listdir(workdir):
         shutil.move(os.path.join(output_path, filename), target_directory)
-    with open(args.output, 'w') as fh:
+    with open(args.json, "w") as fh:
         json.dump(data_manager_json, fh, sort_keys=True)
 
 
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='Create data manager json.')
-    parser.add_argument('--out', dest='output', action='store', help='JSON filename')
-    parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID')
-    parser.add_argument('--url', dest='url', action='store', help='Download URL')
+if __name__ == "__main__":
+
+    # Read command line
+    parser = argparse.ArgumentParser(description="Download BUSCO database")
+    parser.add_argument("--database", help="Database name")
+    parser.add_argument(
+        "--name", default=str(datetime.date.today()), help="Data table entry unique ID"
+    )
+    parser.add_argument("--version", help="BUSCO version")
+    parser.add_argument("--json", help="Path to JSON file")
     args = parser.parse_args()
     main(args)