fabio / gdcwebapp: diff json_data_source_mod.py @ 10:c0be9583df97 (draft)
Uploaded 20170525
| author | fabio |
| --- | --- |
| date | Thu, 25 May 2017 17:58:15 -0400 |
| parents | 7815152f70c6 |
| children | 80593f75d74a |
--- a/json_data_source_mod.py	Thu May 25 15:49:07 2017 -0400
+++ b/json_data_source_mod.py	Thu May 25 17:58:15 2017 -0400
@@ -145,6 +145,7 @@
         extra_files_path = ''.join( [ target_output_filename, 'files' ] )
         download_extra_data( extra_data, extra_files_path )
 
+    """ the following code handles archives and decompress them in a collection """
     check_ext = ""
     if ( fname.endswith( "gz" ) ):
         check_ext = "r:gz"
@@ -157,25 +158,25 @@
         for entry in tf:
             fileobj = tf.extractfile( entry )
             if entry.isfile():
-                """
-                dataset_url, output_filename, \
-                    extra_files_path, file_name, \
-                    ext, out_data_name, \
-                    hda_id, dataset_id = set_up_config_values(json_params)
-                """
+
+                #dataset_url, output_filename, \
+                #    extra_files_path, file_name, \
+                #    ext, out_data_name, \
+                #    hda_id, dataset_id = set_up_config_values(json_params)
+
                 filename = os.path.basename( entry.name )
                 extension = splitext( filename )
                 extra_data = None
                 #target_output_filename = output_filename
-                """ (?P<archive_name>.*)_(?P<file_name>.*)\..* """
+                # (?P<archive_name>.*)_(?P<file_name>.*)\..*
                 filename_with_collection_prefix = query_item.get( 'name' ) + "_" + filename
                 target_output_filename = os.path.join(appdata_path, filename_with_collection_prefix)
-                """
-                metadata_parameter_file.write( metadata_to_json_for_archive_entry( dataset_id, extension,
-                                                                                   filename, target_output_filename,
-                                                                                   ds_type='dataset',
-                                                                                   primary=primary) )
-                """
+
+                #metadata_parameter_file.write( metadata_to_json_for_archive_entry( dataset_id, extension,
+                #                                                                   filename, target_output_filename,
+                #                                                                   ds_type='dataset',
+                #                                                                   primary=primary) )
+
                 store_file_from_archive( fileobj, target_output_filename )
 
     return True
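For context, the comment added in the first hunk describes the block that unpacks a downloaded archive into a dataset collection: each regular tar member is written into `appdata_path` under a `<collection name>_<member name>` filename, while the changeset itself only converts the triple-quoted string comments to `#`-style comments. A minimal standalone sketch of that extraction pattern follows; the function name and parameters are illustrative assumptions, not part of the tool's API, which instead works off `query_item`, `appdata_path`, and `store_file_from_archive`.

```python
import os
import tarfile

def extract_archive_to_collection( archive_path, collection_name, appdata_path ):
    """ Sketch: write every regular file found in a tar archive into
        appdata_path, prefixing each member with the collection name. """
    # choose the tarfile mode from the file extension, mirroring check_ext above
    if archive_path.endswith( "gz" ):
        mode = "r:gz"
    elif archive_path.endswith( "bz2" ):
        mode = "r:bz2"
    else:
        mode = "r"
    tf = tarfile.open( archive_path, mode )
    for entry in tf:
        if not entry.isfile():
            continue
        fileobj = tf.extractfile( entry )
        filename = os.path.basename( entry.name )
        # one output file per archive member: <collection>_<member>
        target = os.path.join( appdata_path, collection_name + "_" + filename )
        with open( target, "wb" ) as out:
            out.write( fileobj.read() )
    tf.close()
    return True
```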