changeset 0:81ed42c0721a draft

planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_manual commit 9f2cdfc1302dce000f9a86d13d4af204cf1dc89d
author iuc
date Sun, 14 Jan 2018 09:52:16 -0500
parents
children 6524e573d9c2
files README data_manager/data_manager_manual.py data_manager/data_manager_manual.xml data_manager_conf.xml tool_data_table_conf.xml.sample
diffstat 5 files changed, 248 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/README	Sun Jan 14 09:52:16 2018 -0500
@@ -0,0 +1,1 @@
+This tool manually builds local data and populates tool data tables.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager_manual.py	Sun Jan 14 09:52:16 2018 -0500
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+# Dan Blankenberg
+
+import json
+import logging
+import optparse
+import os
+import shutil
+import tempfile
+import urllib2
+
+from xml.etree.ElementTree import tostring
+try:
+    # For Python 3.0 and later
+    from shutil import unpack_archive
+except ImportError:
+    # Fall back to Python 2 import
+    from setuptools.archive_util import unpack_archive
+
+try:
+    # For Python 3.0 and later
+    from urllib.request import urlretrieve
+except ImportError:
+    # Fall back to Python 2 imports
+    from urllib import urlretrieve
+
+_log_name = __name__
+if _log_name == '__builtin__':
+    _log_name = 'toolshed.installed.manual.data.manager'
+log = logging.getLogger( _log_name )
+
+
+# --- These methods are called by/within the Galaxy Application
def exec_before_job( app, inp_data, out_data, param_dict, tool=None, **kwd ):
    """Galaxy hook run before the job: ensure every data table named in the
    incoming parameters is defined, dynamically registering missing ones.

    Called by the Galaxy application with the live ``app``; ``tool`` may be
    ``None`` when no tool context is available.
    """
    # Look for any data tables that haven't been defined for this data manager before and dynamically add them to Galaxy
    param_dict = dict( **param_dict )
    data_tables_param = param_dict.get( 'data_tables', [] )
    if not isinstance( data_tables_param, list ):
        data_tables_param = [data_tables_param]
    if tool:
        tool_shed_repository = tool.tool_shed_repository
        data_manager = app.data_managers.get_manager( tool.data_manager_id, None )
    else:
        # BUG FIX: the original dereferenced tool.data_manager_id even when
        # tool was None; guard both lookups behind the same check.
        tool_shed_repository = None
        data_manager = None
    tdtm = None
    for data_table_param in data_tables_param:
        data_table_name = data_table_param.get( 'data_table_name', None )
        if not data_table_name:
            continue
        # get data table managed by this data Manager
        data_table = app.tool_data_tables.get_tables().get( str( data_table_name ), None )
        if not data_table:
            continue
        data_table_filename = data_table.get_filename_for_source( data_manager, None )
        if data_table_filename:
            continue
        if tdtm is None:
            from tool_shed.tools import data_table_manager
            tdtm = data_table_manager.ToolDataTableManager( app )
            target_dir, tool_path, relative_target_dir = tdtm.get_target_install_dir( tool_shed_repository )
        # Dynamically add this data table
        log.debug( "Attempting to dynamically create a missing Tool Data Table named %s." % data_table_name )
        repo_info = tdtm.generate_repository_info_elem_from_repository( tool_shed_repository, parent_elem=None )
        if repo_info is not None:
            # BUG FIX: request a text result — on Python 3 tostring() defaults
            # to bytes, which would render as b'...' in the XML template below.
            repo_info = tostring( repo_info, encoding='unicode' )
        # BUG FIX: open in text mode; the default 'w+b' rejects the str
        # returned by __get_new_xml_definition under Python 3.
        tmp_file = tempfile.NamedTemporaryFile( mode='w' )
        try:
            tmp_file.write( __get_new_xml_definition( app, data_table, data_manager, repo_info, target_dir ) )
            tmp_file.flush()
            app.tool_data_tables.add_new_entries_from_config_file( tmp_file.name, None, app.config.shed_tool_data_table_config, persist=True )
        finally:
            # Ensure the temp file is removed even if registration raises.
            tmp_file.close()
+
+
def __get_new_xml_definition( app, data_table, data_manager, repo_info=None, location_file_dir=None ):
    """Render a minimal <tables> XML definition for *data_table*, creating an
    empty location file beside it if one does not already exist.

    ``repo_info`` is an optional pre-rendered repository XML fragment;
    ``location_file_dir`` overrides app.config.galaxy_data_manager_data_path.
    """
    fields = { 'table_name': data_table.name, 'comment_char': '', 'columns': '', 'file_path': '' }
    fields.update( data_manager.get_tool_shed_repository_info_dict() )
    if data_table.comment_char:
        fields['comment_char'] = 'comment_char="%s"' % ( data_table.comment_char )
    # One <column/> element per named column, each on its own line.
    fields['columns'] = ''.join(
        '\n<column name="%s" index="%s" />' % ( column_name, index )
        for index, column_name in enumerate( data_table.get_column_name_list() )
        if column_name is not None
    )
    base_dir = location_file_dir or app.config.galaxy_data_manager_data_path
    # Only the first registered location file is used (as in the original).
    for table_filename in data_table.filenames.keys():
        loc_path = os.path.join( base_dir, os.path.basename( table_filename ) )  # os.path.abspath?
        if not os.path.exists( loc_path ):
            # Create empty file
            log.debug( "Attempting to create a missing location file %s." % loc_path )
            open( loc_path, 'wb+' ).close()
        fields['file_path'] = loc_path
        break
    fields[ 'repo_info' ] = repo_info or ''
    return """
            <tables><table name="%(table_name)s" %(comment_char)s>
                %(columns)s
                <file path="%(file_path)s" />
                %(repo_info)s
            </table></tables>
           """ % fields
+
+
def galaxy_code_get_available_data_tables( trans ):
    """Return (value, label, selected) option tuples, one per known tool data table."""
    options = []
    for table_name in trans.app.tool_data_tables.get_tables().keys():
        options.append( ( table_name, table_name, False ) )
    return options
+
+
def galaxy_code_get_available_data_table_columns( trans, data_table_name ):
    """Return (value, label, selected) option tuples for each column of the named table."""
    column_names = trans.app.tool_data_tables.get( data_table_name ).get_column_name_list()
    return [ ( name, name, True ) for name in column_names ]
+# --- End Galaxy called Methods ---
+
+
def get_data_table_entries( params, galaxy_data_manager_data_path ):
    """Build ``{table_name: [entry_dict, ...]}`` from the tool's 'data_tables' params.

    A column whose 'is_path' conditional selects yes + abspath has its value
    resolved to an absolute path under *galaxy_data_manager_data_path*.
    """
    entries_by_table = {}
    for table_params in params.get( 'data_tables', [] ):
        entry = {}
        for column_params in table_params.get( 'columns', [] ):
            column_value = column_params.get( 'data_table_column_value', '' )
            is_path = column_params.get( 'is_path', {} )
            if is_path.get( 'is_path_selector', None ) == 'yes' and is_path.get( 'abspath', None ) == 'abspath':
                column_value = os.path.abspath( os.path.join( galaxy_data_manager_data_path, column_value ) )
            entry[ column_params.get( 'data_table_column_name', '' ) ] = column_value
        entries_by_table.setdefault( table_params['data_table_name'], [] ).append( entry )
    return entries_by_table
+
+
def get_file_content( params, target_directory ):
    """Materialize each 'directory_content' item under *target_directory*.

    Each item's file comes either from a URL (downloaded with urlretrieve) or
    from a history dataset path, and is then either unpacked (archives) or
    copied, optionally under an overridden filename.

    Returns the number of directory_content entries processed.
    """
    # BUG FIX: urllib2 does not exist on Python 3; import urlsplit from the
    # right place for either interpreter instead of urllib2.urlparse.urlsplit.
    try:
        from urllib.parse import urlsplit  # Python 3
    except ImportError:
        from urlparse import urlsplit  # Python 2
    directory_content = params.get( 'directory_content', [] )
    for content in directory_content:
        target_path = os.path.join( target_directory, content.get( 'subdir', '' ) )
        try:
            os.makedirs( target_path )
        except OSError:
            # Directory already exists; any other failure surfaces on write below.
            pass
        file_source = content.get( 'file_source', {} )
        if file_source.get( 'file_source_selector', None ) == 'URL':
            ( filename, headers ) = urlretrieve( file_source.get( 'file_URL', None ) )
            try:
                # NOTE(review): the raw Content-Disposition value is used as the
                # filename; a well-formed header ('attachment; filename=x') would
                # need parsing — behavior preserved pending confirmation.
                bname = headers['Content-Disposition']
            except KeyError:
                bname = os.path.basename( urlsplit( file_source.get( 'file_URL', None ) ).path )
        else:
            filename = file_source.get( 'file_history', None )
            bname = os.path.basename( filename )
        file_action = content.get( 'file_action', {} )
        if file_action.get( 'file_action_selector', None ) == 'unpack':
            unpack_archive( filename, target_path )
        else:
            filename_override = file_action.get( 'filename_override', None )
            if filename_override:
                target_path = os.path.join( target_path, filename_override )
            else:
                target_path = os.path.join( target_path, bname )
            shutil.copyfile( filename, target_path )
    return len( directory_content )
+
+
def main():
    """CLI entry point: read the data manager JSON params file (first
    positional argument), write the computed data table entries back to it,
    and materialize any requested directory content into the output dataset's
    extra_files_path.
    """
    parser = optparse.OptionParser()
    parser.add_option( '', '--galaxy_data_manager_data_path', dest='galaxy_data_manager_data_path', default='', help='Root path for galaxy_data_manager_data_path' )
    (options, args) = parser.parse_args()

    filename = args[0]

    # Context managers close the params file deterministically.
    with open( filename ) as fh:
        params = json.load( fh )
    target_directory = params[ 'output_data' ][0]['extra_files_path']

    data_table_entries = get_data_table_entries( params['param_dict'], options.galaxy_data_manager_data_path )

    # save info to json file
    # BUG FIX: write in text mode — json.dumps returns str, which the
    # original binary 'wb' handle rejects under Python 3.
    with open( filename, 'w' ) as fh:
        json.dump( { "data_tables": data_table_entries }, fh )

    get_file_content( params['param_dict'], target_directory )


if __name__ == "__main__":
    main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager/data_manager_manual.xml	Sun Jan 14 09:52:16 2018 -0500
@@ -0,0 +1,69 @@
+<tool id="data_manager_manual" name="Manual Data Manager" version="0.0.1" tool_type="manage_data">
+    <options sanitize="False" />
+    <description>Entry Builder</description>
+    <command detect_errors="exit_code"><![CDATA[
+        python '$__tool_directory__/data_manager_manual.py'
+        '${out_file}'
+        --galaxy_data_manager_data_path '${__app__.config.galaxy_data_manager_data_path}'
+    ]]></command>
+    <inputs>
+        <repeat name="data_tables" title="Data Table to define" min="1">
+            <param name="data_table_name" type="select" multiple="False" optional="False" label="Choose Desired Data Table"
+                dynamic_options="galaxy_code_get_available_data_tables( __trans__ )" refresh_on_change="True"/>
+        <repeat name="columns" title="Table Columns" min="1">
+            <param name="data_table_column_name" type="select" multiple="False" optional="False" label="Column Name"
+                dynamic_options="galaxy_code_get_available_data_table_columns( __trans__, data_table_name )" />
+            <!-- <param name="data_table_column_name" type="text" label="Column Name"/> -->
+            <param name="data_table_column_value" type="text" label="Value to use for data table column"/>
+            <conditional name="is_path">
+                <param name="is_path_selector" type="select" label="Value is a path">
+                    <option value="yes">Yes</option>
+                    <option value="no" selected="True">No</option>
+                </param>
+                <when value="yes">
+                    <param name="abspath" type="boolean" label="Apply abspath" checked="True" truevalue="abspath" falsevalue="" />
+                </when>
+                <when value="no"/>
+            </conditional>
+        </repeat>
+        </repeat>
+        <repeat name="directory_content" title="Directory Content">
+            <param name="subdir" type="text" label="Place in Subdirectory" value=""/>
+            <conditional name="file_source">
+                <param name="file_source_selector" type="select" label="File Content Source">
+                    <option value="history">History Item</option>
+                    <option value="URL" selected="True">URL</option>
+                </param>
+                <when value="history">
+                    <param name="file_history" type="data" multiple="false" optional="false" format="data" label="File from history" />
+                </when>
+                <when value="URL">
+                    <param name="file_URL" type="text" label="Fetch from URL" value=""/>
+                </when>
+            </conditional>
+            <conditional name="file_action">
+                <param name="file_action_selector" type="select" label="File Actions">
+                    <option value="unpack">Extract Archive</option>
+                    <option value="None" selected="True">None</option>
+                </param>
+                <when value="None">
+                    <param name="filename_override" type="text" label="Override with Filename" value=""/>
+                </when>
+                <when value="unpack">
+                </when>
+            </conditional>
+        </repeat>
+    </inputs>
+    <outputs>
+        <data name="out_file" format="data_manager_json" dbkey="dbkey"/>
+    </outputs>
+    <tests>
+    </tests>
+    <help>
+**What it does**
+
+This tool allows manually specifying data table entries and optionally providing filesystem contents.
+
+    </help>
+    <code file="data_manager_manual.py" />
+</tool>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/data_manager_conf.xml	Sun Jan 14 09:52:16 2018 -0500
@@ -0,0 +1,4 @@
+<?xml version="1.0"?>
+<data_managers>
+    <data_manager tool_file="data_manager/data_manager_manual.xml" id="data_manager_manual" version="0.0.1" undeclared_tables="True" />
+</data_managers>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tool_data_table_conf.xml.sample	Sun Jan 14 09:52:16 2018 -0500
@@ -0,0 +1,2 @@
+<tables>
+</tables>