# HG changeset patch
# User eric-rasche
# Date 1417808280 18000
# Node ID a90d7b00d72784adc51e8fbe234384e2442857a9
# Parent ba70ea57cef9443685a6e05a37c4f70752693245
Removed testing code

diff -r ba70ea57cef9 -r a90d7b00d727 gsaf_download.py
--- a/gsaf_download.py	Fri Dec 05 14:34:49 2014 -0500
+++ b/gsaf_download.py	Fri Dec 05 14:38:00 2014 -0500
@@ -40,14 +40,14 @@
     data = json.loads(gsafjson)
     log.info("Fetching %s" % data['filename'])
     file_path = os.path.join('output', data['filename'])
-    #urllib.urlretrieve(data['url'], file_path)
+    urllib.urlretrieve(data['url'], file_path)
     log.info("Hashing file")
-    #file_md5 = hashlib.md5(open(file_path).read()).hexdigest()
-    #log.debug("Hashed to %s" % file_md5)
+    file_md5 = hashlib.md5(open(file_path).read()).hexdigest()
+    log.debug("Hashed to %s" % file_md5)
 
     stderr = ''
-    #if file_md5 != data['md5']:
-        #stderr = 'md5sum mismatch: %s != %s' % (file_md5, data['md5'])
+    if file_md5 != data['md5']:
+        stderr = 'md5sum mismatch: %s != %s' % (file_md5, data['md5'])
 
     # Galaxy.json
     # {"name": "lambda.fa", "stdout": "uploaded fasta file", "line_count": 811, "ext": "fasta", "dataset_id": 16220, "type": "dataset"}
@@ -65,10 +65,10 @@
         'type': 'dataset'
     }
 
-    #try:
-        #subprocess.check_call(['gunzip', file_path])
-    #except:
-        #log.error("Couldn't extract %s" % data['filename'])
+    try:
+        subprocess.check_call(['gunzip', file_path])
+    except:
+        log.error("Couldn't extract %s" % data['filename'])
 
     gx_json.write(json.dumps(galaxy_json) + "\n")