Mercurial repository: bgruening / data_manager_diamond_database_builder
changeset 1:5a0d0bee4f8d (draft)

"planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/data_managers/data_manager_diamond_database_builder commit b2d290a8b609ebbc7f4b93716370143c41062ad4"

author:    bgruening
date:      Tue, 03 Dec 2019 17:39:48 -0500
parents:   ce62d0912b10
children:  5558f74bd296
files:     .shed.yml data_manager/data_manager_diamond_database_builder.py data_manager/data_manager_diamond_database_builder.xml test-data/diamond_data_manager.json test-data/diamond_database.loc test-data/phiX174.fasta tool_data_table_conf.xml.test tool_dependencies.xml
diffstat:  8 files changed, 236 insertions(+), 122 deletions(-)
--- a/.shed.yml	Sun Feb 08 10:05:20 2015 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-owner: bgruening
-name: data_manager_diamond_database_builder
--- a/data_manager/data_manager_diamond_database_builder.py	Sun Feb 08 10:05:20 2015 -0500
+++ b/data_manager/data_manager_diamond_database_builder.py	Tue Dec 03 17:39:48 2019 -0500
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+import json
 import sys
 import os
 import tempfile
@@ -12,13 +13,12 @@
 import gzip
 import bz2
 
-from galaxy.util.json import from_json_string, to_json_string
+CHUNK_SIZE = 2**20  # 1mb
 
-CHUNK_SIZE = 2**20 #1mb
 
-def cleanup_before_exit( tmp_dir ):
-    if tmp_dir and os.path.exists( tmp_dir ):
-        shutil.rmtree( tmp_dir )
+def cleanup_before_exit(tmp_dir):
+    if tmp_dir and os.path.exists(tmp_dir):
+        shutil.rmtree(tmp_dir)
 
 
 def stop_err(msg):
@@ -26,146 +26,146 @@
     sys.exit(1)
 
 
-def _get_files_in_ftp_path( ftp, path ):
+def _get_files_in_ftp_path(ftp, path):
     path_contents = []
-    ftp.retrlines( 'MLSD %s' % ( path ), path_contents.append )
-    return [ line.split( ';' )[ -1 ].lstrip() for line in path_contents ]
+    ftp.retrlines('MLSD %s' % (path), path_contents.append)
+    return [line.split(';')[-1].lstrip() for line in path_contents]
 
 
-def _get_stream_readers_for_tar( file_obj, tmp_dir ):
-    fasta_tar = tarfile.open( fileobj=file_obj, mode='r:*' )
-    return [ fasta_tar.extractfile( member ) for member in fasta_tar.getmembers() ]
+def _get_stream_readers_for_tar(file_obj, tmp_dir):
+    fasta_tar = tarfile.open(fileobj=file_obj, mode='r:*')
+    return [fasta_tar.extractfile(member) for member in fasta_tar.getmembers()]
 
 
-def _get_stream_readers_for_zip( file_obj, tmp_dir ):
-    fasta_zip = zipfile.ZipFile( file_obj, 'r' )
+def _get_stream_readers_for_zip(file_obj, tmp_dir):
+    fasta_zip = zipfile.ZipFile(file_obj, 'r')
     rval = []
     for member in fasta_zip.namelist():
-        fasta_zip.extract( member, tmp_dir )
-        rval.append( open( os.path.join( tmp_dir, member ), 'rb' ) )
+        fasta_zip.extract(member, tmp_dir)
+        rval.append(open(os.path.join(tmp_dir, member), 'rb'))
     return rval
 
 
-def _get_stream_readers_for_gzip( file_obj, tmp_dir ):
-    return [ gzip.GzipFile( fileobj=file_obj, mode='rb' ) ]
+def _get_stream_readers_for_gzip(file_obj, tmp_dir):
+    return [gzip.GzipFile(fileobj=file_obj, mode='rb')]
 
 
-def _get_stream_readers_for_bz2( file_obj, tmp_dir ):
-    return [ bz2.BZ2File( file_obj.name, 'rb' ) ]
+def _get_stream_readers_for_bz2(file_obj, tmp_dir):
+    return [bz2.BZ2File(file_obj.name, 'rb')]
 
 
-def download_from_ncbi( data_manager_dict, params, target_directory, database_id, database_name ):
+def download_from_ncbi(data_manager_dict, params, target_directory, database_id, database_name):
     NCBI_FTP_SERVER = 'ftp.ncbi.nlm.nih.gov'
     NCBI_DOWNLOAD_PATH = '/blast/db/FASTA/'
-    COMPRESSED_EXTENSIONS = [ ( '.tar.gz', _get_stream_readers_for_tar ), ( '.tar.bz2', _get_stream_readers_for_tar ), ( '.zip', _get_stream_readers_for_zip ), ( '.gz', _get_stream_readers_for_gzip ), ( '.bz2', _get_stream_readers_for_bz2 ) ]
+    COMPRESSED_EXTENSIONS = [('.tar.gz', _get_stream_readers_for_tar), ('.tar.bz2', _get_stream_readers_for_tar), ('.zip', _get_stream_readers_for_zip), ('.gz', _get_stream_readers_for_gzip), ('.bz2', _get_stream_readers_for_bz2)]
 
     ncbi_identifier = params['param_dict']['reference_source']['requested_identifier']
-    ftp = FTP( NCBI_FTP_SERVER )
+    ftp = FTP(NCBI_FTP_SERVER)
     ftp.login()
 
-    path_contents = _get_files_in_ftp_path( ftp, NCBI_DOWNLOAD_PATH )
+    path_contents = _get_files_in_ftp_path(ftp, NCBI_DOWNLOAD_PATH)
 
     ncbi_file_name = None
     get_stream_reader = None
     ext = None
     for ext, get_stream_reader in COMPRESSED_EXTENSIONS:
-        if "%s%s" % ( ncbi_identifier, ext ) in path_contents:
-            ncbi_file_name = "%s%s%s" % ( NCBI_DOWNLOAD_PATH, ncbi_identifier, ext )
+        if "%s%s" % (ncbi_identifier, ext) in path_contents:
+            ncbi_file_name = "%s%s%s" % (NCBI_DOWNLOAD_PATH, ncbi_identifier, ext)
             break
 
     if not ncbi_file_name:
-        raise Exception( 'Unable to determine filename for NCBI database for %s: %s' % ( ncbi_identifier, path_contents ) )
+        raise Exception('Unable to determine filename for NCBI database for %s: %s' % (ncbi_identifier, path_contents))
 
-    tmp_dir = tempfile.mkdtemp( prefix='tmp-data-manager-ncbi-' )
-    ncbi_fasta_filename = os.path.join( tmp_dir, "%s%s" % ( ncbi_identifier, ext ) )
-
+    tmp_dir = tempfile.mkdtemp(prefix='tmp-data-manager-ncbi-')
+    ncbi_fasta_filename = os.path.join(tmp_dir, "%s%s" % (ncbi_identifier, ext))
+
     fasta_base_filename = "%s.fa" % database_id
-    fasta_filename = os.path.join( target_directory, fasta_base_filename )
-    fasta_writer = open( fasta_filename, 'wb+' )
-
-    tmp_extract_dir = os.path.join ( tmp_dir, 'extracted_fasta' )
-    os.mkdir( tmp_extract_dir )
-
-    tmp_fasta = open( ncbi_fasta_filename, 'wb+' )
-
-    ftp.retrbinary( 'RETR %s' % ncbi_file_name, tmp_fasta.write )
-
+    fasta_filename = os.path.join(target_directory, fasta_base_filename)
+    fasta_writer = open(fasta_filename, 'wb+')
+
+    tmp_extract_dir = os.path.join(tmp_dir, 'extracted_fasta')
+    os.mkdir(tmp_extract_dir)
+
+    tmp_fasta = open(ncbi_fasta_filename, 'wb+')
+
+    ftp.retrbinary('RETR %s' % ncbi_file_name, tmp_fasta.write)
+
     tmp_fasta.flush()
-    tmp_fasta.seek( 0 )
-
-    fasta_readers = get_stream_reader( tmp_fasta, tmp_extract_dir )
-
-    data_table_entry = _stream_fasta_to_file( fasta_readers, target_directory, database_id, database_name, params )
-    _add_data_table_entry( data_manager_dict, data_table_entry )
-
+    tmp_fasta.seek(0)
+
+    fasta_readers = get_stream_reader(tmp_fasta, tmp_extract_dir)
+
+    data_table_entry = _stream_fasta_to_file(fasta_readers, target_directory, database_id, database_name, params)
+    _add_data_table_entry(data_manager_dict, data_table_entry)
+
    for fasta_reader in fasta_readers:
         fasta_reader.close()
     tmp_fasta.close()
-    cleanup_before_exit( tmp_dir )
+    cleanup_before_exit(tmp_dir)
 
 
-def download_from_url( data_manager_dict, params, target_directory, database_id, database_name ):
-    #TODO: we should automatically do decompression here
-    urls = filter( bool, map( lambda x: x.strip(), params['param_dict']['reference_source']['user_url'].split( '\n' ) ) )
-    fasta_reader = [ urllib2.urlopen( url ) for url in urls ]
-
-    data_table_entry = _stream_fasta_to_file( fasta_reader, target_directory, database_id, database_name, params )
-    _add_data_table_entry( data_manager_dict, data_table_entry )
+def download_from_url(data_manager_dict, params, target_directory, database_id, database_name):
+    # TODO: we should automatically do decompression here
+    urls = filter(bool, map(lambda x: x.strip(), params['param_dict']['reference_source']['user_url'].split('\n')))
+    fasta_reader = [urllib2.urlopen(url) for url in urls]
+
+    data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
+    _add_data_table_entry(data_manager_dict, data_table_entry)
 
 
-def download_from_history( data_manager_dict, params, target_directory, database_id, database_name ):
-    #TODO: allow multiple FASTA input files
+def download_from_history(data_manager_dict, params, target_directory, database_id, database_name):
+    # TODO: allow multiple FASTA input files
     input_filename = params['param_dict']['reference_source']['input_fasta']
-    if isinstance( input_filename, list ):
-        fasta_reader = [ open( filename, 'rb' ) for filename in input_filename ]
+    if isinstance(input_filename, list):
+        fasta_reader = [open(filename, 'rb') for filename in input_filename]
     else:
-        fasta_reader = open( input_filename )
-
-    data_table_entry = _stream_fasta_to_file( fasta_reader, target_directory, database_id, database_name, params )
-    _add_data_table_entry( data_manager_dict, data_table_entry )
+        fasta_reader = open(input_filename)
+
+    data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
+    _add_data_table_entry(data_manager_dict, data_table_entry)
 
 
-def copy_from_directory( data_manager_dict, params, target_directory, database_id, database_name ):
+def copy_from_directory(data_manager_dict, params, target_directory, database_id, database_name):
     input_filename = params['param_dict']['reference_source']['fasta_filename']
     create_symlink = params['param_dict']['reference_source']['create_symlink'] == 'create_symlink'
     if create_symlink:
-        data_table_entry = _create_symlink( input_filename, target_directory, database_id, database_name )
+        data_table_entry = _create_symlink(input_filename, target_directory, database_id, database_name)
     else:
-        if isinstance( input_filename, list ):
-            fasta_reader = [ open( filename, 'rb' ) for filename in input_filename ]
+        if isinstance(input_filename, list):
+            fasta_reader = [open(filename, 'rb') for filename in input_filename]
         else:
-            fasta_reader = open( input_filename )
-        data_table_entry = _stream_fasta_to_file( fasta_reader, target_directory, database_id, database_name, params )
-    _add_data_table_entry( data_manager_dict, data_table_entry )
+            fasta_reader = open(input_filename)
+        data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
+    _add_data_table_entry(data_manager_dict, data_table_entry)
 
 
-def _add_data_table_entry( data_manager_dict, data_table_entry ):
-    data_manager_dict['data_tables'] = data_manager_dict.get( 'data_tables', {} )
-    data_manager_dict['data_tables']['diamond_database'] = data_manager_dict['data_tables'].get( 'diamond_database', [] )
-    data_manager_dict['data_tables']['diamond_database'].append( data_table_entry )
+def _add_data_table_entry(data_manager_dict, data_table_entry):
+    data_manager_dict['data_tables'] = data_manager_dict.get('data_tables', {})
+    data_manager_dict['data_tables']['diamond_database'] = data_manager_dict['data_tables'].get('diamond_database', [])
+    data_manager_dict['data_tables']['diamond_database'].append(data_table_entry)
     return data_manager_dict
 
 
-def _stream_fasta_to_file( fasta_stream, target_directory, database_id, database_name, params, close_stream=True ):
+def _stream_fasta_to_file(fasta_stream, target_directory, database_id, database_name, params, close_stream=True):
     fasta_base_filename = "%s.fa" % database_id
-    fasta_filename = os.path.join( target_directory, fasta_base_filename )
+    fasta_filename = os.path.join(target_directory, fasta_base_filename)
 
-    temp_fasta = tempfile.NamedTemporaryFile( delete=False, suffix=".fasta" )
+    temp_fasta = tempfile.NamedTemporaryFile(delete=False, suffix=".fasta")
     temp_fasta.close()
-    fasta_writer = open( temp_fasta.name, 'wb+' )
+    fasta_writer = open(temp_fasta.name, 'wb+')
 
-    if isinstance( fasta_stream, list ) and len( fasta_stream ) == 1:
+    if isinstance(fasta_stream, list) and len(fasta_stream) == 1:
         fasta_stream = fasta_stream[0]
 
-    if isinstance( fasta_stream, list ):
+    if isinstance(fasta_stream, list):
         last_char = None
         for fh in fasta_stream:
-            if last_char not in [ None, '\n', '\r' ]:
-                fasta_writer.write( '\n' )
+            if last_char not in [None, '\n', '\r']:
+                fasta_writer.write('\n')
             while True:
-                data = fh.read( CHUNK_SIZE )
+                data = fh.read(CHUNK_SIZE)
                 if data:
-                    fasta_writer.write( data )
+                    fasta_writer.write(data)
                     last_char = data[-1]
                 else:
                     break
@@ -173,9 +173,9 @@
                 fh.close()
     else:
         while True:
-            data = fasta_stream.read( CHUNK_SIZE )
+            data = fasta_stream.read(CHUNK_SIZE)
             if data:
-                fasta_writer.write( data )
+                fasta_writer.write(data)
             else:
                 break
         if close_stream:
@@ -183,56 +183,58 @@
 
     fasta_writer.close()
 
-    args = [ 'diamond', 'makedb', '--in', temp_fasta.name, '--db', fasta_filename]
+    args = ['diamond', 'makedb', '--in', temp_fasta.name, '--db', fasta_filename]
 
-    tmp_stderr = tempfile.NamedTemporaryFile( prefix = "tmp-data-manager-diamond-database-builder-stderr" )
-    proc = subprocess.Popen( args=args, shell=False, cwd=target_directory, stderr=tmp_stderr.fileno() )
+    tmp_stderr = tempfile.NamedTemporaryFile(prefix="tmp-data-manager-diamond-database-builder-stderr")
+    proc = subprocess.Popen(args=args, shell=False, cwd=target_directory, stderr=tmp_stderr.fileno())
     return_code = proc.wait()
     if return_code:
         tmp_stderr.flush()
         tmp_stderr.seek(0)
         print >> sys.stderr, "Error building diamond database:"
         while True:
-            chunk = tmp_stderr.read( CHUNK_SIZE )
+            chunk = tmp_stderr.read(CHUNK_SIZE)
             if not chunk:
                 break
-            sys.stderr.write( chunk )
-        sys.exit( return_code )
+            sys.stderr.write(chunk)
+        sys.exit(return_code)
     tmp_stderr.close()
-    os.remove( temp_fasta.name )
-    return dict( value=database_id, name=database_name, db_path="%s.dmnd" % fasta_base_filename )
+    os.remove(temp_fasta.name)
+    return dict(value=database_id, name=database_name, db_path="%s.dmnd" % fasta_base_filename)
 
 
-def _create_symlink( input_filename, target_directory, database_id, database_name ):
+def _create_symlink(input_filename, target_directory, database_id, database_name):
     fasta_base_filename = "%s.fa" % database_id
-    fasta_filename = os.path.join( target_directory, fasta_base_filename )
-    os.symlink( input_filename, fasta_filename )
-    return dict( value=database_id, name=database_name, db_path=fasta_base_filename )
+    fasta_filename = os.path.join(target_directory, fasta_base_filename)
+    os.symlink(input_filename, fasta_filename)
+    return dict(value=database_id, name=database_name, db_path=fasta_base_filename)
 
 
-REFERENCE_SOURCE_TO_DOWNLOAD = dict( ncbi=download_from_ncbi, url=download_from_url, history=download_from_history, directory=copy_from_directory )
+REFERENCE_SOURCE_TO_DOWNLOAD = dict(ncbi=download_from_ncbi, url=download_from_url, history=download_from_history, directory=copy_from_directory)
+
 
 def main():
-    #Parse Command Line
+    # Parse Command Line
     parser = optparse.OptionParser()
-    parser.add_option( '-d', '--dbkey_description', dest='dbkey_description', action='store', type="string", default=None, help='dbkey_description' )
+    parser.add_option('-d', '--dbkey_description', dest='dbkey_description', action='store', type="string", default=None, help='dbkey_description')
     (options, args) = parser.parse_args()
-
+
     filename = args[0]
-
-    params = from_json_string( open( filename ).read() )
-    target_directory = params[ 'output_data' ][0]['extra_files_path']
-    os.mkdir( target_directory )
+
+    params = json.loads(open(filename).read())
+    target_directory = params['output_data'][0]['extra_files_path']
+    os.mkdir(target_directory)
     data_manager_dict = {}
 
     database_id = params['param_dict']['database_id']
     database_name = params['param_dict']['database_name']
 
-    #Fetch the FASTA
-    REFERENCE_SOURCE_TO_DOWNLOAD[ params['param_dict']['reference_source']['reference_source_selector'] ]( data_manager_dict, params, target_directory, database_id, database_name )
-
-    #save info to json file
-    open( filename, 'wb' ).write( to_json_string( data_manager_dict ) )
+    # Fetch the FASTA
+    REFERENCE_SOURCE_TO_DOWNLOAD[params['param_dict']['reference_source']['reference_source_selector']](data_manager_dict, params, target_directory, database_id, database_name)
+
+    # save info to json file
+    open(filename, 'w').write(json.dumps(data_manager_dict, sort_keys=True))
+
 
 if __name__ == "__main__":
     main()
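The rewritten script keeps the usual Galaxy data manager contract: Galaxy serializes the tool parameters to a JSON file, passes that file's path as the only positional argument, and the script overwrites the same file with the data table entries it created. A minimal sketch of the two payloads, reusing the values from the bundled test case (the extra_files_path below is invented for illustration):

    # Shape of the params file Galaxy hands to the script (sketch; path invented):
    params = {
        "output_data": [{"extra_files_path": "/tmp/dataset_1_files"}],
        "param_dict": {
            "database_id": "sequence_id",
            "database_name": "PhiX174",
            "reference_source": {
                "reference_source_selector": "history",
                "input_fasta": "phiX174.fasta",
            },
        },
    }

    # What the script writes back once 'diamond makedb' succeeds; compare
    # test-data/diamond_data_manager.json below:
    result = {
        "data_tables": {
            "diamond_database": [
                {"value": "sequence_id", "name": "PhiX174", "db_path": "sequence_id.fa.dmnd"}
            ]
        }
    }

The db_path ends in .fa.dmnd because the script stages the input FASTA as "<database_id>.fa" and diamond makedb appends the .dmnd suffix to the name passed via --db.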
--- a/data_manager/data_manager_diamond_database_builder.xml	Sun Feb 08 10:05:20 2015 -0500
+++ b/data_manager/data_manager_diamond_database_builder.xml	Tue Dec 03 17:39:48 2019 -0500
@@ -1,10 +1,11 @@
-<tool id="diamond_database_builder" name="Diamond" tool_type="manage_data" version="0.0.1">
+<tool id="diamond_database_builder" name="Diamond" tool_type="manage_data" version="0.0.2">
     <description> Database builder</description>
     <requirements>
-        <requirement type="package" version="0.6.13">diamond</requirement>
+        <requirement type="package" version="0.9.29">diamond</requirement>
+        <requirement type="package" version="2.7">python</requirement>
     </requirements>
-    <command interpreter="python">
-        data_manager_diamond_database_builder.py "${out_file}"
+    <command>
+        python '$__tool_directory__/data_manager_diamond_database_builder.py' '${out_file}'
     </command>
     <inputs>
         <param type="text" name="database_name" value="" label="Database name or description"
@@ -35,6 +36,17 @@
     <outputs>
         <data name="out_file" format="data_manager_json"/>
     </outputs>
+    <tests>
+        <test>
+            <param name="database_name" value="PhiX174"/>
+            <param name="database_id" value="sequence_id"/>
+            <conditional name="reference_source">
+                <param name="reference_source_selector" value="history"/>
+                <param name="input_fasta" value="phiX174.fasta"/>
+            </conditional>
+            <output name="out_file" value="diamond_data_manager.json"/>
+        </test>
+    </tests>
     <help>
 
 .. class:: infomark
@@ -43,4 +55,7 @@
 
 For example the NR database can be downloaded from ftp://ftp.ncbi.nlm.nih.gov/blast/db/FASTA/nr.gz
     </help>
+    <citations>
+        <citation type="doi">10.1038/nmeth.3176</citation>
+    </citations>
 </tool>
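With the new <tests> block in place, the data manager can be exercised outside a full Galaxy instance; one plausible invocation, assuming planemo is installed and can resolve the diamond 0.9.29 requirement through conda:

    planemo test data_manager/data_manager_diamond_database_builder.xml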
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/diamond_data_manager.json	Tue Dec 03 17:39:48 2019 -0500
@@ -0,0 +1,1 @@
+{"data_tables": {"diamond_database": [{"db_path": "sequence_id.fa.dmnd", "name": "PhiX174", "value": "sequence_id"}]}}
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/diamond_database.loc	Tue Dec 03 17:39:48 2019 -0500
@@ -0,0 +1,18 @@
+#This file lists the locations and dbkeys of all the fasta files
+#under the "genome" directory (a directory that contains a directory
+#for each build). The script extract_fasta.py will generate the file
+#all_fasta.loc. This file has the format (white space characters are
+#TAB characters):
+#
+#<unique_build_id>	<dbkey>	<display_name>	<file_path>
+#
+#So, all_fasta.loc could look something like this:
+#
+#apiMel3	apiMel3	Honeybee (Apis mellifera): apiMel3	/path/to/genome/apiMel3/apiMel3.fa
+#hg19canon	hg19	Human (Homo sapiens): hg19 Canonical	/path/to/genome/hg19/hg19canon.fa
+#hg19full	hg19	Human (Homo sapiens): hg19 Full	/path/to/genome/hg19/hg19full.fa
+#
+#Your all_fasta.loc file should contain an entry for each individual
+#fasta file. So there will be multiple fasta files for each build,
+#such as with hg19 above.
+#
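Note that the comment block above is inherited from Galaxy's all_fasta .loc template and describes a four-column layout; the diamond_database table this file actually feeds (declared in tool_data_table_conf.xml.test below) takes three TAB-separated columns: value, name, db_path. A populated entry would look roughly like this (path invented for illustration):

    #value	name	db_path
    sequence_id	PhiX174	/data/diamond/sequence_id.fa.dmnd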
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/phiX174.fasta	Tue Dec 03 17:39:48 2019 -0500
@@ -0,0 +1,79 @@
+>phiX174
+GAGTTTTATCGCTTCCATGACGCAGAAGTTAACACTTTCGGATATTTCTGATGAGTCGAAAAATTATCTT
+GATAAAGCAGGAATTACTACTGCTTGTTTACGAATTAAATCGAAGTGGACTGCTGGCGGAAAATGAGAAA
+ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTACTTTGCGACCTTTCGCCATCAACTAACGATTCTG
+TCAAAAACTGACGCGTTGGATGAGGAGAAGTGGCTTAATATGCTTGGCACGTTCGTCAAGGACTGGTTTA
+GATATGAGTCACATTTTGTTCATGGTAGAGATTCTCTTGTTGACATTTTAAAAGAGCGTGGATTACTATC
+TGAGTCCGATGCTGTTCAACCACTAATAGGTAAGAAATCATGAGTCAAGTTACTGAACAATCCGTACGTT
+TCCAGACCGCTTTGGCCTCTATTAAGCTCATTCAGGCTTCTGCCGTTTTGGATTTAACCGAAGATGATTT
+CGATTTTCTGACGAGTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTGCTCGTCGCTGCGTTGAGGCT
+TGCGTTTATGGTACGCTGGACTTTGTGGGATACCCTCGCTTTCCTGCTCCTGTTGAGTTTATTGCTGCCG
+TCATTGCTTATTATGTTCATCCCGTCAACATTCAAACGGCCTGTCTCATCATGGAAGGCGCTGAATTTAC
+GGAAAACATTATTAATGGCGTCGAGCGTCCGGTTAAAGCCGCTGAATTGTTCGCGTTTACCTTGCGTGTA
+CGCGCAGGAAACACTGACGTTCTTACTGACGCAGAAGAAAACGTGCGTCAAAAATTACGTGCAGAAGGAG
+TGATGTAATGTCTAAAGGTAAAAAACGTTCTGGCGCTCGCCCTGGTCGTCCGCAGCCGTTGCGAGGTACT
+AAAGGCAAGCGTAAAGGCGCTCGTCTTTGGTATGTAGGTGGTCAACAATTTTAATTGCAGGGGCTTCGGC
+CCCTTACTTGAGGATAAATTATGTCTAATATTCAAACTGGCGCCGAGCGTATGCCGCATGACCTTTCCCA
+TCTTGGCTTCCTTGCTGGTCAGATTGGTCGTCTTATTACCATTTCAACTACTCCGGTTATCGCTGGCGAC
+TCCTTCGAGATGGACGCCGTTGGCGCTCTCCGTCTTTCTCCATTGCGTCGTGGCCTTGCTATTGACTCTA
+CTGTAGACATTTTTACTTTTTATGTCCCTCATCGTCACGTTTATGGTGAACAGTGGATTAAGTTCATGAA
+GGATGGTGTTAATGCCACTCCTCTCCCGACTGTTAACACTACTGGTTATATTGACCATGCCGCTTTTCTT
+GGCACGATTAACCCTGATACCAATAAAATCCCTAAGCATTTGTTTCAGGGTTATTTGAATATCTATAACA
+ACTATTTTAAAGCGCCGTGGATGCCTGACCGTACCGAGGCTAACCCTAATGAGCTTAATCAAGATGATGC
+TCGTTATGGTTTCCGTTGCTGCCATCTCAAAAACATTTGGACTGCTCCGCTTCCTCCTGAGACTGAGCTT
+TCTCGCCAAATGACGACTTCTACCACATCTATTGACATTATGGGTCTGCAAGCTGCTTATGCTAATTTGC
+ATACTGACCAAGAACGTGATTACTTCATGCAGCGTTACCGTGATGTTATTTCTTCATTTGGAGGTAAAAC
+CTCTTATGACGCTGACAACCGTCCTTTACTTGTCATGCGCTCTAATCTCTGGGCATCTGGCTATGATGTT
+GATGGAACTGACCAAACGTCGTTAGGCCAGTTTTCTGGTCGTGTTCAACAGACCTATAAACATTCTGTGC
+CGCGTTTCTTTGTTCCTGAGCATGGCACTATGTTTACTCTTGCGCTTGTTCGTTTTCCGCCTACTGCGAC
+TAAAGAGATTCAGTACCTTAACGCTAAAGGTGCTTTGACTTATACCGATATTGCTGGCGACCCTGTTTTG
+TATGGCAACTTGCCGCCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCTGGTGATTCGTCTAAGAAGT
+TTAAGATTGCTGAGGGTCAGTGGTATCGTTATGCGCCTTCGTATGTTTCTCCTGCTTATCACCTTCTTGA
+AGGCTTCCCATTCATTCAGGAACCGCCTTCTGGTGATTTGCAAGAACGCGTACTTATTCGCCACCATGAT
+TATGACCAGTGTTTCCAGTCCGTTCAGTTGTTGCAGTGGAATAGTCAGGTTAAATTTAATGTGACCGTTT
+ATCGCAATCTGCCGACCACTCGCGATTCAATCATGACTTCGTGATAAAAGATTGAGTGTGAGGTTATAAC
+GCCGAAGCGGTAAAAATTTTAATTTTTGCCGCTGAGGGGTTGACCAAGCGAAGCGCGGTAGGTTTTCTGC
+TTAGGAGTTTAATCATGTTTCAGACTTTTATTTCTCGCCATAATTCAAACTTTTTTTCTGATAAGCTGGT
+TCTCACTTCTGTTACTCCAGCTTCTTCGGCACCTGTTTTACAGACACCTAAAGCTACATCGTCAACGTTA
+TATTTTGATAGTTTGACGGTTAATGCTGGTAATGGTGGTTTTCTTCATTGCATTCAGATGGATACATCTG
+TCAACGCCGCTAATCAGGTTGTTTCTGTTGGTGCTGATATTGCTTTTGATGCCGACCCTAAATTTTTTGC
+CTGTTTGGTTCGCTTTGAGTCTTCTTCGGTTCCGACTACCCTCCCGACTGCCTATGATGTTTATCCTTTG
+AATGGTCGCCATGATGGTGGTTATTATACCGTCAAGGACTGTGTGACTATTGACGTCCTTCCCCGTACGC
+CGGGCAATAATGTTTATGTTGGTTTCATGGTTTGGTCTAACTTTACCGCTACTAAATGCCGCGGATTGGT
+TTCGCTGAATCAGGTTATTAAAGAGATTATTTGTCTCCAGCCACTTAAGTGAGGTGATTTATGTTTGGTG
+CTATTGCTGGCGGTATTGCTTCTGCTCTTGCTGGTGGCGCCATGTCTAAATTGTTTGGAGGCGGTCAAAA
+AGCCGCCTCCGGTGGCATTCAAGGTGATGTGCTTGCTACCGATAACAATACTGTAGGCATGGGTGATGCT
+GGTATTAAATCTGCCATTCAAGGCTCTAATGTTCCTAACCCTGATGAGGCCGCCCCTAGTTTTGTTTCTG
+GTGCTATGGCTAAAGCTGGTAAAGGACTTCTTGAAGGTACGTTGCAGGCTGGCACTTCTGCCGTTTCTGA
+TAAGTTGCTTGATTTGGTTGGACTTGGTGGCAAGTCTGCCGCTGATAAAGGAAAGGATACTCGTGATTAT
+CTTGCTGCTGCATTTCCTGAGCTTAATGCTTGGGAGCGTGCTGGTGCTGATGCTTCCTCTGCTGGTATGG +TTGACGCCGGATTTGAGAATCAAAAAGAGCTTACTAAAATGCAACTGGACAATCAGAAAGAGATTGCCGA +GATGCAAAATGAGACTCAAAAAGAGATTGCTGGCATTCAGTCGGCGACTTCACGCCAGAATACGAAAGAC +CAGGTATATGCACAAAATGAGATGCTTGCTTATCAACAGAAGGAGTCTACTGCTCGCGTTGCGTCTATTA +TGGAAAACACCAATCTTTCCAAGCAACAGCAGGTTTCCGAGATTATGCGCCAAATGCTTACTCAAGCTCA +AACGGCTGGTCAGTATTTTACCAATGACCAAATCAAAGAAATGACTCGCAAGGTTAGTGCTGAGGTTGAC +TTAGTTCATCAGCAAACGCAGAATCAGCGGTATGGCTCTTCTCATATTGGCGCTACTGCAAAGGATATTT +CTAATGTCGTCACTGATGCTGCTTCTGGTGTGGTTGATATTTTTCATGGTATTGATAAAGCTGTTGCCGA +TACTTGGAACAATTTCTGGAAAGACGGTAAAGCTGATGGTATTGGCTCTAATTTGTCTAGGAAATAACCG +TCAGGATTGACACCCTCCCAATTGTATGTTTTCATGCCTCCAAATCTTGGAGGCTTTTTTATGGTTCGTT +CTTATTACCCTTCTGAATGTCACGCTGATTATTTTGACTTTGAGCGTATCGAGGCTCTTAAACCTGCTAT +TGAGGCTTGTGGCATTTCTACTCTTTCTCAATCCCCAATGCTTGGCTTCCATAAGCAGATGGATAACCGC +ATCAAGCTCTTGGAAGAGATTCTGTCTTTTCGTATGCAGGGCGTTGAGTTCGATAATGGTGATATGTATG +TTGACGGCCATAAGGCTGCTTCTGACGTTCGTGATGAGTTTGTATCTGTTACTGAGAAGTTAATGGATGA +ATTGGCACAATGCTACAATGTGCTCCCCCAACTTGATATTAATAACACTATAGACCACCGCCCCGAAGGG +GACGAAAAATGGTTTTTAGAGAACGAGAAGACGGTTACGCAGTTTTGCCGCAAGCTGGCTGCTGAACGCC +CTCTTAAGGATATTCGCGATGAGTATAATTACCCCAAAAAGAAAGGTATTAAGGATGAGTGTTCAAGATT +GCTGGAGGCCTCCACTATGAAATCGCGTAGAGGCTTTACTATTCAGCGTTTGATGAATGCAATGCGACAG +GCTCATGCTGATGGTTGGTTTATCGTTTTTGACACTCTCACGTTGGCTGACGACCGATTAGAGGCGTTTT +ATGATAATCCCAATGCTTTGCGTGACTATTTTCGTGATATTGGTCGTATGGTTCTTGCTGCCGAGGGTCG +CAAGGCTAATGATTCACACGCCGACTGCTATCAGTATTTTTGTGTGCCTGAGTATGGTACAGCTAATGGC +CGTCTTCATTTCCATGCGGTGCATTTTATGCGGACACTTCCTACAGGTAGCGTTGACCCTAATTTTGGTC +GTCGGGTACGCAATCGCCGCCAGTTAAATAGCTTGCAAAATACGTGGCCTTATGGTTACAGTATGCCCAT +CGCAGTTCGCTACACGCAGGACGCTTTTTCACGTTCTGGTTGGTTGTGGCCTGTTGATGCTAAAGGTGAG +CCGCTTAAAGCTACCAGTTATATGGCTGTTGGTTTCTATGTGGCTAAATACGTTAACAAAAAGTCAGATA +TGGACCTTGCTGCTAAAGGTCTAGGAGCTAAAGAATGGAACAACTCACTAAAAACCAAGCTGTCGCTACT +TCCCAAGAAGCTGTTCAGAATCAGAATGAGCCGCAACTTCGGGATGAAAATGCTCACAATGACAAATCTG +TCCACGGAGTGCTTAATCCAACTTACCAAGCTGGGTTACGACGCGACGCCGTTCAACCAGATATTGAAGC +AGAACGCAAAAAGAGAGATGAGATTGAGGCTGGGAAAAGTTACTGTAGCCGACGTTTTGGCGGCGCAACC +TGTGACGACAAATCTGCTCAAATTTATGCGCGCTTCGATAAAAATGATTGGCGTATCCAACCTGCA +
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.test	Tue Dec 03 17:39:48 2019 -0500
@@ -0,0 +1,7 @@
+<tables>
+    <!-- Locations of all fasta files required to build Diamond databases -->
+    <table name="diamond_database" comment_char="#">
+        <columns>value, name, db_path</columns>
+        <file path="${__HERE__}/test-data/diamond_database.loc" />
+    </table>
+</tables>
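For context, a downstream tool wrapper would surface the rows of this table through a select parameter that reads it via from_data_table; a hedged sketch of such a parameter (name and label invented):

    <param name="database" type="select" label="DIAMOND database">
        <options from_data_table="diamond_database">
            <validator type="no_options" message="No DIAMOND databases are available" />
        </options>
    </param>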
--- a/tool_dependencies.xml	Sun Feb 08 10:05:20 2015 -0500
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,6 +0,0 @@
-<?xml version="1.0"?>
-<tool_dependency>
-    <package name="diamond" version="0.6.13">
-        <repository changeset_revision="47c4dfd5aed5" name="package_diamond_0_6_13" owner="iuc" toolshed="https://toolshed.g2.bx.psu.edu" />
-    </package>
-</tool_dependency>