) {\\n chop;\\n if (m/^>/) {\\n s/^>//;\\n if ($. > 1) {\\n print OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\n }\\n $gc = 0;\\n $length = 0;\\n } else {\\n ++$gc while m/[gc]/ig;\\n $length += length $_;\\n }\\n}\\nprint OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\nclose( IN );\\nclose( OUT );\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"perl version of gc counter from planemo example\", \"help_text\": \"**What it Does**\\ncounts gc using, ugh, perl...\\n\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"fasta\"], \"input_label\": \"input fasta file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"tsv\", \"history_CL\": \"2\"}], \"edit_params\": \"no\", \"additional_parameters\": []}, \"tool_name\": \"perlgc\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "3b6aab01-4759-4df6-801f-626678639e51",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "f964e779-2f92-4c81-9819-3e1ebc156664"
- },
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "7aea56bd-4f39-4d3b-8254-a6675161d059"
- }
- ]
- },
- "3": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 3,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 1,
- "output_name": "output"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 492,
- "height": 202,
- "left": 613,
- "right": 813,
- "top": 290,
- "width": 200,
- "x": 613,
- "y": 290
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"system\", \"__current_case__\": 1, \"exe_package\": \"sed\", \"exe_package_version\": \"\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"sed runner\", \"help_text\": \"sed '/old/new/g input.txt\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input text\", \"input_help\": \"parameter_help\", \"input_CL\": \"3\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"STDOUT\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"sedstring\", \"param_type\": \"text\", \"param_value\": \"s/def/bjork!bjorkdef/g\", \"param_label\": \"parameter_label\", \"param_help\": \"parameter_help\", \"param_CL\": \"1\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"sedtest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "2138c717-5128-4c4b-bc22-4809cd001c34",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "b0be8c95-7380-42b8-a16d-8e08578d4dd7"
- },
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "56635519-a9a0-49eb-8305-59cc1fcef99f"
- }
- ]
- },
- "4": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 4,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 1,
- "output_name": "output"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 652,
- "height": 242,
- "left": 613,
- "right": 813,
- "top": 410,
- "width": 200,
- "x": 613,
- "y": 410
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\nappendme = sys.argv[3]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write(appendme)\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"appendme\", \"param_type\": \"text\", \"param_value\": \"added at the end\", \"param_label\": \"append string\", \"param_help\": \"parameter_help\", \"param_CL\": \"3\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"pyrevaddpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "30dbe033-30c4-4228-b0cb-854df30f5594",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "37fdd905-471d-4479-a98a-4dfbaa6314be"
- },
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "7c8a8dba-1e8c-49d5-b51d-a0ab09931932"
- }
- ]
- },
- "5": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 5,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 1,
- "output_name": "output"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 772,
- "height": 242,
- "left": 613,
- "right": 813,
- "top": 530,
- "width": 200,
- "x": 613,
- "y": 530
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\nimport argparse\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--infile',default='')\\na('--outfile',default=None)\\nargs = parser.parse_args()\\ninp = args.infile\\noutp = args.outfile\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"reverse argparse\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"infile\", \"input_help\": \"parameter_help\", \"input_CL\": \"infile\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"outfile\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevargparse\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "91a0dccf-384c-491a-ae08-f426888d26cf",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "61a5271d-3940-4855-9093-a0710dc3fe08"
- },
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "a6602e23-dc1c-44b7-8ed7-cd9971ff9d30"
- }
- ]
- },
- "6": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 6,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 1,
- "output_name": "output"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 852,
- "height": 202,
- "left": 613,
- "right": 813,
- "top": 650,
- "width": 200,
- "x": 613,
- "y": 650
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"bash\", \"__current_case__\": 5, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"rev | tac\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"tacrev\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"0\", \"__current_case__\": 2, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"2\"}]}, \"tool_name\": \"tacrev\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "edb5d852-908b-45bf-8892-e0e8c337c31d",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "c1394cf9-bb03-4ac3-8466-8ee0cc30c0a0"
- },
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "e45566f4-d40e-4ad0-ad27-72ce814b13da"
- }
- ]
- },
- "7": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 7,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 1,
- "output_name": "output"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 992,
- "height": 222,
- "left": 613,
- "right": 813,
- "top": 770,
- "width": 200,
- "x": 613,
- "y": 770
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "08a48555-8700-4652-a76b-df1f54197049",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "e96ae086-a92a-4018-8f07-ebf4974807e6"
- },
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "b5bd73bb-1ddc-4161-be2e-370bab9aebbe"
- }
- ]
- },
- "8": {
- "annotation": "",
- "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "errors": null,
- "id": 8,
- "input_connections": {
- "ppass|history_inputs_0|input_files": {
- "id": 7,
- "output_name": "new_tool"
- }
- },
- "inputs": [],
- "label": null,
- "name": "toolfactory",
- "outputs": [
- {
- "name": "TF_run_report",
- "type": "input"
- },
- {
- "name": "new_tool",
- "type": "tgz"
- }
- ],
- "position": {
- "bottom": 412,
- "height": 242,
- "left": 833,
- "right": 1033,
- "top": 170,
- "width": 200,
- "x": 833,
- "y": 170
- },
- "post_job_actions": {},
- "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
- "tool_shed_repository": {
- "changeset_revision": "51fa77152988",
- "name": "tool_factory_2",
- "owner": "fubar",
- "tool_shed": "toolshed.g2.bx.psu.edu"
- },
- "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"import argparse\\nimport tarfile\\nimport os\\nimport tempfile\\nimport subprocess\\n\\n\\\"\\\"\\\"\\nplanemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/pyrevargparse/ &> pyrevargparse\\n\\\"\\\"\\\"\\n\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--tooltgz',default='')\\na('--report',default=None)\\na('--toolout',default=None)\\na('--galaxy_root',default=None)\\nargs = parser.parse_args()\\ntoolname = args.toolout.split(os.sep)[-1]\\ntoolpath = os.path.join(args.galaxy_root,args.toolout)\\ntf = tarfile.open(args.tooltgz,\\\"r:gz\\\")\\ntf.extractall(toolpath)\\ncl = \\\"planemo test --skip_venv --galaxy_root %s %s\\\" % (args.galaxy_root,toolpath)\\ncll = cl.split(' ')\\nsto = open(args.report, 'w')\\np = subprocess.run(cll, shell=False, stdout=sto)\\nretval = p.returncode\\nsto.close()\\n\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"Tool to test toolshed tool archives generated by the tool factory.\", \"help_text\": \"**What it Does**\\n\\nGiven a toolshed tgz file generated by a tool factory run, this will unpack it and run planemo test, returning the planemo stdout as a report\\nIt was generated using the tool factory.\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"tgz\"], \"input_label\": \"tool toolshed tgz archive from history\", \"input_help\": \"Run planemo test on a tool shed tool archive tgz format file generated by the ToolFactory or Planemo\", \"input_CL\": \"tooltgz\"}], 
\"history_outputs\": [{\"__index__\": 0, \"history_name\": \"report\", \"history_format\": \"txt\", \"history_CL\": \"report\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"toolout\", \"param_type\": \"text\", \"param_value\": \"tools/toolmakers/planemotest\", \"param_label\": \"output path under galaxy root\", \"param_help\": \"This is where the tgz file will be extracted and tested by planemo\", \"param_CL\": \"toolout\", \"param_CLprefixed\": \"\"}, {\"__index__\": 1, \"param_name\": \"galaxy_root\", \"param_type\": \"text\", \"param_value\": \"/home/ross/galaxy\", \"param_label\": \"Galaxy source root directory to use for running planemo\", \"param_help\": \"This will form the galaxy_root parameter for rnning planemo using an existing Galaxy source tree, and the tgz will be extracted at a path relative to that rootu\", \"param_CL\": \"galaxy_root\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"planemotest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
- "tool_version": "2.00",
- "type": "tool",
- "uuid": "b9bfb1a4-4c0c-4d39-9e74-223da72f8abc",
- "workflow_outputs": [
- {
- "label": null,
- "output_name": "TF_run_report",
- "uuid": "09ba44ea-4da8-46f5-a411-ca054ccedd3b"
- },
- {
- "label": null,
- "output_name": "new_tool",
- "uuid": "50a8ff4a-702a-4983-8202-8a79c0a3c978"
- }
- ]
- }
- },
- "tags": [],
- "uuid": "321a7f9f-c287-453c-807a-43afd948770e",
- "version": 0
-}
diff -r b938475235e3 -r e7e9732ebed6 docker/dockerfile.seq
--- a/docker/dockerfile.seq Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,25 +0,0 @@
-# Galaxy - Toolshed docker
-
-FROM quay.io/bgruening/galaxy:19.01
-
-MAINTAINER Björn A. Grüning, bjoern.gruening@gmail.com
-
-ENV GALAXY_CONFIG_BRAND ToolFactory
-ENV GALAXY_CONFIG_SANITIZE_ALL_HTML false
-
-# Install tools
-#ADD data_managers.yaml $GALAXY_ROOT/data_managers.yaml
-#RUN install-tools $GALAXY_ROOT/data_managers.yaml && \
-# /tool_deps/_conda/bin/conda clean --tarballs && \
-# rm /export/galaxy-central/ -rf
-ADD my_tool_list.yml $GALAXY_ROOT/tools1.yaml
-RUN install-tools $GALAXY_ROOT/tools1.yaml && \
- /tool_deps/_conda/bin/conda clean --tarballs && \
- rm /export/galaxy-central/ -rf
-
-ADD TF_example_wf.ga $GALAXY_HOME/workflows/TF_example_wf.ga
-
-ADD post-start-actions.sh /export/post-start-actions.sh
-RUN chmod a+x /export/post-start-actions.sh
-
-
diff -r b938475235e3 -r e7e9732ebed6 docker/my_tool_list.yml
--- a/docker/my_tool_list.yml Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,9 +0,0 @@
-install_resolver_dependencies: true
-install_tool_dependencies: false
-tools:
-- name: tool_factory_2
- owner: fubar
- tool_panel_section_label: 'Make new Tools'
- tool_shed_url: https://toolshed.g2.bx.psu.edu
-
-
diff -r b938475235e3 -r e7e9732ebed6 docker/post-start-actions.sh
--- a/docker/post-start-actions.sh Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,5 +0,0 @@
-#!/bin/bash
-# hook to install tf demo workflow
-echo "#### post start actions.sh hook happening"
-chown $GALAXY_USER $GALAXY_ROOT/workflows/TF_example_wf.ga
-workflow-install -w $GALAXY_ROOT/workflows/TF_example_wf.ga -g http://localhost -a fakekey --publish_workflows
diff -r b938475235e3 -r e7e9732ebed6 docker/startgaldock.sh
--- a/docker/startgaldock.sh Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-docker run -d -p 8080:80 -v /home/ubuntu/galaxy_storage/:/export/ toolfactory
diff -r b938475235e3 -r e7e9732ebed6 docker/startup
--- a/docker/startup Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,460 +0,0 @@
-#!/usr/bin/env bash
-
-# Migration path for old images that had the tool_deps under /export/galaxy-central/tool_deps/
-
-if [ -d "/export/galaxy-central/tool_deps/" ] && [ ! -L "/export/galaxy-central/tool_deps/" ]; then
- mkdir -p /export/tool_deps/
- mv /export/galaxy-central/tool_deps /export/
- ln -s /export/tool_deps/ $GALAXY_ROOT/
-fi
-
-# This is needed for Docker compose to have a unified alias for the main container.
-# Modifying /etc/hosts can only happen during runtime not during build-time
-echo "127.0.0.1 galaxy" >> /etc/hosts
-
-# Set number of Galaxy handlers via GALAXY_HANDLER_NUMPROCS or default to 2
-ansible localhost -m ini_file -a "dest=/etc/supervisor/conf.d/galaxy.conf section=program:handler option=numprocs value=${GALAXY_HANDLER_NUMPROCS:-2}" &> /dev/null
-
-# If the Galaxy config file is not in the expected place, copy from the sample
-# and hope for the best (that the admin has done all the setup through env vars.)
-if [ ! -f $GALAXY_CONFIG_FILE ]
- then
- # this should succesfully copy either .yml or .ini sample file to the expected location
- cp /export/config/galaxy${GALAXY_CONFIG_FILE: -4}.sample $GALAXY_CONFIG_FILE
-fi
-
-# Configure proxy prefix filtering
-if [[ ! -z $PROXY_PREFIX ]]
- then
- if [ ${GALAXY_CONFIG_FILE: -4} == ".ini" ]
- then
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}" &> /dev/null
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=app:main option=filter-with value=proxy-prefix" &> /dev/null
- else
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ module:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ socket:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ mount:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ manage-script-name:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' manage-script-name: true'" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' mount: ${PROXY_PREFIX}=galaxy.webapps.galaxy.buildapp:uwsgi_app()'" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' socket: unix:///srv/galaxy/var/uwsgi.sock'" &> /dev/null
-
- # Also set SCRIPT_NAME. It's not always necessary due to manage-script-name: true in galaxy.yml, but it makes life easier in this container + it does no harm
- ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf regexp='^ uwsgi_param SCRIPT_NAME' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf insertafter='^ include uwsgi_params' line=' uwsgi_param SCRIPT_NAME ${PROXY_PREFIX};'" &> /dev/null
- fi
-
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}/reports" &> /dev/null
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=app:main option=filter-with value=proxy-prefix" &> /dev/null
-
- # Fix path to html assets
- ansible localhost -m replace -a "dest=$GALAXY_CONFIG_DIR/web/welcome.html regexp='(href=\"|\')[/\\w]*(/static)' replace='\\1${PROXY_PREFIX}\\2'" &> /dev/null
-
- # Set some other vars based on that prefix
- if [ "x$GALAXY_CONFIG_COOKIE_PATH" == "x" ]
- then
- export GALAXY_CONFIG_COOKIE_PATH="$PROXY_PREFIX"
- fi
- if [ "x$GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX" == "x" ]
- then
- export GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX="$PROXY_PREFIX/gie_proxy"
- fi
-
- # Change the defaults nginx upload/x-accel paths
- if [ "$GALAXY_CONFIG_NGINX_UPLOAD_PATH" == "/_upload" ]
- then
- export GALAXY_CONFIG_NGINX_UPLOAD_PATH="${PROXY_PREFIX}${GALAXY_CONFIG_NGINX_UPLOAD_PATH}"
- fi
-fi
-
-# Disable authentication of Galaxy reports
-if [[ ! -z $DISABLE_REPORTS_AUTH ]]
- then
- # disable authentification
- echo "Disable Galaxy reports authentification "
- echo "" > /etc/nginx/conf.d/reports_auth.conf
- else
- # enable authentification
- echo "Enable Galaxy reports authentification "
- cp /etc/nginx/conf.d/reports_auth.conf.source /etc/nginx/conf.d/reports_auth.conf
-fi
-
-# Try to guess if we are running under --privileged mode
-if [[ ! -z $HOST_DOCKER_LEGACY ]]; then
- if mount | grep "/proc/kcore"; then
- PRIVILEGED=false
- else
- PRIVILEGED=true
- fi
-else
- # Taken from http://stackoverflow.com/questions/32144575/how-to-know-if-a-docker-container-is-running-in-privileged-mode
- ip link add dummy0 type dummy 2>/dev/null
- if [[ $? -eq 0 ]]; then
- PRIVILEGED=true
- # clean the dummy0 link
- ip link delete dummy0 2>/dev/null
- else
- PRIVILEGED=false
- fi
-fi
-
-cd $GALAXY_ROOT
-. $GALAXY_VIRTUAL_ENV/bin/activate
-
-if $PRIVILEGED; then
- umount /var/lib/docker
-fi
-
-if [[ ! -z $STARTUP_EXPORT_USER_FILES ]]; then
- # If /export/ is mounted, export_user_files file moving all data to /export/
- # symlinks will point from the original location to the new path under /export/
- # If /export/ is not given, nothing will happen in that step
- echo "Checking /export..."
- python3 /usr/local/bin/export_user_files.py $PG_DATA_DIR_DEFAULT
-fi
-
-# Delete compiled templates in case they are out of date
-if [[ ! -z $GALAXY_CONFIG_TEMPLATE_CACHE_PATH ]]; then
- rm -rf $GALAXY_CONFIG_TEMPLATE_CACHE_PATH/*
-fi
-
-# Enable loading of dependencies on startup. Such as LDAP.
-# Adapted from galaxyproject/galaxy/scripts/common_startup.sh
-if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]]
- then
- echo "Installing optional dependencies in galaxy virtual environment..."
- : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
- GALAXY_CONDITIONAL_DEPENDENCIES=$(PYTHONPATH=lib python -c "import galaxy.dependencies; print('\n'.join(galaxy.dependencies.optional('$GALAXY_CONFIG_FILE')))")
- [ -z "$GALAXY_CONDITIONAL_DEPENDENCIES" ] || echo "$GALAXY_CONDITIONAL_DEPENDENCIES" | pip install -q -r /dev/stdin --index-url "${GALAXY_WHEELS_INDEX_URL}"
-fi
-
-if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]] && [[ ! -z $LOAD_PYTHON_DEV_DEPENDENCIES ]]
- then
- echo "Installing development requirements in galaxy virtual environment..."
- : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
- dev_requirements='./lib/galaxy/dependencies/dev-requirements.txt'
- [ -f $dev_requirements ] && pip install -q -r $dev_requirements --index-url "${GALAXY_WHEELS_INDEX_URL}"
-fi
-
-# Enable Test Tool Shed
-if [[ ! -z $ENABLE_TTS_INSTALL ]]
- then
- echo "Enable installation from the Test Tool Shed."
- export GALAXY_CONFIG_TOOL_SHEDS_CONFIG_FILE=$GALAXY_HOME/tool_sheds_conf.xml
-fi
-
-# Remove all default tools from Galaxy by default
-if [[ ! -z $BARE ]]
- then
- echo "Remove all tools from the tool_conf.xml file."
- export GALAXY_CONFIG_TOOL_CONFIG_FILE=config/shed_tool_conf.xml,$GALAXY_ROOT/test/functional/tools/upload_tool_conf.xml
-fi
-
-# If auto installing conda envs, make sure bcftools is installed for __set_metadata__ tool
-if [[ ! -z $GALAXY_CONFIG_CONDA_AUTO_INSTALL ]]
- then
- if [ ! -d "/tool_deps/_conda/envs/__bcftools@1.5" ]; then
- su $GALAXY_USER -c "/tool_deps/_conda/bin/conda create -y --override-channels --channel iuc --channel conda-forge --channel bioconda --channel defaults --name __bcftools@1.5 bcftools=1.5"
- su $GALAXY_USER -c "/tool_deps/_conda/bin/conda clean --tarballs --yes"
- fi
-fi
-
-if [[ ! -z $GALAXY_EXTRAS_CONFIG_POSTGRES ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- # Backward compatibility for exported postgresql directories before version 15.08.
- # In previous versions postgres has the UID/GID of 102/106. We changed this in
- # https://github.com/bgruening/docker-galaxy-stable/pull/71 to GALAXY_POSTGRES_UID=1550 and
- # GALAXY_POSTGRES_GID=1550
- if [ -e /export/postgresql/ ];
- then
- if [ `stat -c %g /export/postgresql/` == "106" ];
- then
- chown -R postgres:postgres /export/postgresql/
- fi
- fi
- fi
-fi
-
-
-if [[ ! -z $GALAXY_EXTRAS_CONFIG_CONDOR ]]; then
- if [[ ! -z $ENABLE_CONDOR ]]
- then
- if [[ ! -z $CONDOR_HOST ]]
- then
- echo "Enabling Condor with external scheduler at $CONDOR_HOST"
- echo "# Config generated by startup.sh
-CONDOR_HOST = $CONDOR_HOST
-ALLOW_ADMINISTRATOR = *
-ALLOW_OWNER = *
-ALLOW_READ = *
-ALLOW_WRITE = *
-ALLOW_CLIENT = *
-ALLOW_NEGOTIATOR = *
-DAEMON_LIST = MASTER, SCHEDD
-UID_DOMAIN = galaxy
-DISCARD_SESSION_KEYRING_ON_STARTUP = False
-TRUST_UID_DOMAIN = true" > /etc/condor/condor_config.local
- fi
-
- if [[ -e /export/condor_config ]]
- then
- echo "Replacing Condor config by locally supplied config from /export/condor_config"
- rm -f /etc/condor/condor_config
- ln -s /export/condor_config /etc/condor/condor_config
- fi
- fi
-fi
-
-
-# Copy or link the slurm/munge config files
-if [ -e /export/slurm.conf ]
-then
- rm -f /etc/slurm-llnl/slurm.conf
- ln -s /export/slurm.conf /etc/slurm-llnl/slurm.conf
-else
- # Configure SLURM with runtime hostname.
- # Use absolute path to python so virtualenv is not used.
- /usr/bin/python /usr/sbin/configure_slurm.py
-fi
-if [ -e /export/munge.key ]
-then
- rm -f /etc/munge/munge.key
- ln -s /export/munge.key /etc/munge/munge.key
- chmod 400 /export/munge.key
-fi
-
-# link the gridengine config file
-if [ -e /export/act_qmaster ]
-then
- rm -f /var/lib/gridengine/default/common/act_qmaster
- ln -s /export/act_qmaster /var/lib/gridengine/default/common/act_qmaster
-fi
-
-# Waits until postgres is ready
-function wait_for_postgres {
- echo "Checking if database is up and running"
- until /usr/local/bin/check_database.py 2>&1 >/dev/null; do sleep 1; echo "Waiting for database"; done
- echo "Database connected"
-}
-
-# $NONUSE can be set to include cron, proftp, reports or nodejs
-# if included we will _not_ start these services.
-function start_supervisor {
- supervisord -c /etc/supervisor/supervisord.conf
- sleep 5
-
- if [[ ! -z $SUPERVISOR_MANAGE_POSTGRES && ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- echo "Starting postgres"
- supervisorctl start postgresql
- fi
- fi
-
- wait_for_postgres
-
- # Make sure the database is automatically updated
- if [[ ! -z $GALAXY_AUTO_UPDATE_DB ]]
- then
- echo "Updating Galaxy database"
- sh manage_db.sh -c /etc/galaxy/galaxy.yml upgrade
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_CRON ]]; then
- if [[ $NONUSE != *"cron"* ]]
- then
- echo "Starting cron"
- supervisorctl start cron
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_PROFTP ]]; then
- if [[ $NONUSE != *"proftp"* ]]
- then
- echo "Starting ProFTP"
- supervisorctl start proftpd
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_REPORTS ]]; then
- if [[ $NONUSE != *"reports"* ]]
- then
- echo "Starting Galaxy reports webapp"
- supervisorctl start reports
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_IE_PROXY ]]; then
- if [[ $NONUSE != *"nodejs"* ]]
- then
- echo "Starting nodejs"
- supervisorctl start galaxy:galaxy_nodejs_proxy
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_CONDOR ]]; then
- if [[ $NONUSE != *"condor"* ]]
- then
- echo "Starting condor"
- supervisorctl start condor
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_SLURM ]]; then
- if [[ $NONUSE != *"slurmctld"* ]]
- then
- echo "Starting slurmctld"
- supervisorctl start slurmctld
- fi
- if [[ $NONUSE != *"slurmd"* ]]
- then
- echo "Starting slurmd"
- supervisorctl start slurmd
- fi
- supervisorctl start munge
- else
- if [[ $NONUSE != *"slurmctld"* ]]
- then
- echo "Starting slurmctld"
- /usr/sbin/slurmctld -L $GALAXY_LOGS_DIR/slurmctld.log
- fi
- if [[ $NONUSE != *"slurmd"* ]]
- then
- echo "Starting slurmd"
- /usr/sbin/slurmd -L $GALAXY_LOGS_DIR/slurmd.log
- fi
-
- # We need to run munged regardless
- mkdir -p /var/run/munge && /usr/sbin/munged -f
- fi
-}
-
-if [[ ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- # Change the data_directory of postgresql in the main config file
- ansible localhost -m lineinfile -a "line='data_directory = \'$PG_DATA_DIR_HOST\'' dest=$PG_CONF_DIR_DEFAULT/postgresql.conf backup=yes state=present regexp='data_directory'" &> /dev/null
- fi
-fi
-
-if $PRIVILEGED; then
- echo "Enable Galaxy Interactive Environments."
- export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY="config/plugins/interactive_environments"
- if [ x$DOCKER_PARENT == "x" ]; then
- #build the docker in docker environment
- bash /root/cgroupfs_mount.sh
- start_supervisor
- supervisorctl start docker
- else
- #inheriting /var/run/docker.sock from parent, assume that you need to
- #run docker with sudo to validate
- echo "$GALAXY_USER ALL = NOPASSWD : ALL" >> /etc/sudoers
- start_supervisor
- fi
- if [[ ! -z $PULL_IE_IMAGES ]]; then
- echo "About to pull IE images. Depending on the size, this may take a while!"
-
- for ie in {JUPYTER,RSTUDIO,ETHERCALC,PHINCH,NEO}; do
- enabled_var_name="GALAXY_EXTRAS_IE_FETCH_${ie}";
- if [[ ${!enabled_var_name} ]]; then
- # Store name in a var
- image_var_name="GALAXY_EXTRAS_${ie}_IMAGE"
- # And then read from that var
- docker pull "${!image_var_name}"
- fi
- done
- fi
-
- # in privileged mode autofs and CVMFS is available
- # install autofs
- echo "Installing autofs to enable automatic CVMFS mounts"
- apt-get install autofs --no-install-recommends -y
- apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/*
-else
- echo "Disable Galaxy Interactive Environments. Start with --privileged to enable IE's."
- export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY=""
- start_supervisor
-fi
-
-if [ "$USE_HTTPS_LETSENCRYPT" != "False" ]
-then
- echo "Settting up letsencrypt"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=letsencrypt \
- --extra-vars galaxy_extras_galaxy_domain="GALAXY_CONFIG_GALAXY_INFRASTRUCTURE_URL" \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
-fi
-if [ "$USE_HTTPS" != "False" ]
-then
- if [ -f /export/server.key -a -f /export/server.crt ]
- then
- echo "Copying SSL keys"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=own \
- --extra-vars src_nginx_ssl_certificate_key=/export/server.key \
- --extra-vars src_nginx_ssl_certificate=/export/server.crt \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
- else
- echo "Setting up self-signed SSL keys"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=self-signed \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
- fi
-fi
-
-# In case the user wants the default admin to be created, do so.
-if [[ ! -z $GALAXY_DEFAULT_ADMIN_USER ]]
- then
- echo "Creating admin user $GALAXY_DEFAULT_ADMIN_USER with key $GALAXY_DEFAULT_ADMIN_KEY and password $GALAXY_DEFAULT_ADMIN_PASSWORD if not existing"
- python /usr/local/bin/create_galaxy_user.py --user "$GALAXY_DEFAULT_ADMIN_EMAIL" --password "$GALAXY_DEFAULT_ADMIN_PASSWORD" \
- -c "$GALAXY_CONFIG_FILE" --username "$GALAXY_DEFAULT_ADMIN_USER" --key "$GALAXY_DEFAULT_ADMIN_KEY"
- # If there is a need to execute actions that would require a live galaxy instance, such as adding workflows, setting quotas, adding more users, etc.
- # then place a file with that logic named post-start-actions.sh on the /export/ directory, it should have access to all environment variables
- # visible here.
- # The file needs to be executable (chmod a+x post-start-actions.sh)
-fi
-if [ -x /export/post-start-actions.sh ]
- then
- # uses ephemeris, present in docker-galaxy-stable, to wait for the local instance
- /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > $GALAXY_LOGS_DIR/post-start-actions.log &&
- /export/post-start-actions.sh >> $GALAXY_LOGS_DIR/post-start-actions.log &
-fi
-
-
-# Reinstall tools if the user want to
-if [[ ! -z $GALAXY_AUTO_UPDATE_TOOLS ]]
- then
- /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > /home/galaxy/logs/post-start-actions.log &&
- OLDIFS=$IFS
- IFS=','
- for TOOL_YML in `echo "$GALAXY_AUTO_UPDATE_TOOLS"`
- do
- echo "Installing tools from $TOOL_YML"
- /tool_deps/_conda/bin/shed-tools install -g "http://127.0.0.1" -a "$GALAXY_DEFAULT_ADMIN_KEY" -t "$TOOL_YML"
- /tool_deps/_conda/bin/conda clean --tarballs --yes
- done
- IFS=$OLDIFS
-fi
-
-# migrate custom IEs or Visualisations (Galaxy plugins)
-# this is needed for by the new client build system
-python3 ${GALAXY_ROOT}/scripts/plugin_staging.py
-
-# Enable verbose output
-if [ `echo ${GALAXY_LOGGING:-'no'} | tr [:upper:] [:lower:]` = "full" ]
- then
- tail -f /var/log/supervisor/* /var/log/nginx/* $GALAXY_LOGS_DIR/*.log
- else
- tail -f $GALAXY_LOGS_DIR/*.log
-fi
-
diff -r b938475235e3 -r e7e9732ebed6 docker/startup.sh
--- a/docker/startup.sh Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,462 +0,0 @@
-#!/usr/bin/env bash
-
-# Migration path for old images that had the tool_deps under /export/galaxy-central/tool_deps/
-
-if [ -d "/export/galaxy-central/tool_deps/" ] && [ ! -L "/export/galaxy-central/tool_deps/" ]; then
- mkdir -p /export/tool_deps/
- mv /export/galaxy-central/tool_deps /export/
- ln -s /export/tool_deps/ $GALAXY_ROOT/
-fi
-
-# This is needed for Docker compose to have a unified alias for the main container.
-# Modifying /etc/hosts can only happen during runtime not during build-time
-echo "127.0.0.1 galaxy" >> /etc/hosts
-
-# Set number of Galaxy handlers via GALAXY_HANDLER_NUMPROCS or default to 2
-ansible localhost -m ini_file -a "dest=/etc/supervisor/conf.d/galaxy.conf section=program:handler option=numprocs value=${GALAXY_HANDLER_NUMPROCS:-2}" &> /dev/null
-
-# If the Galaxy config file is not in the expected place, copy from the sample
-# and hope for the best (that the admin has done all the setup through env vars.)
-if [ ! -f $GALAXY_CONFIG_FILE ]
- then
- # this should succesfully copy either .yml or .ini sample file to the expected location
- cp /export/config/galaxy${GALAXY_CONFIG_FILE: -4}.sample $GALAXY_CONFIG_FILE
-fi
-
-# Configure proxy prefix filtering
-if [[ ! -z $PROXY_PREFIX ]]
- then
- if [ ${GALAXY_CONFIG_FILE: -4} == ".ini" ]
- then
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}" &> /dev/null
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=app:main option=filter-with value=proxy-prefix" &> /dev/null
- else
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ module:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ socket:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ mount:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ manage-script-name:' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' manage-script-name: true'" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' mount: ${PROXY_PREFIX}=galaxy.webapps.galaxy.buildapp:uwsgi_app()'" &> /dev/null
- ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' socket: unix:///srv/galaxy/var/uwsgi.sock'" &> /dev/null
-
- # Also set SCRIPT_NAME. It's not always necessary due to manage-script-name: true in galaxy.yml, but it makes life easier in this container + it does no harm
- ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf regexp='^ uwsgi_param SCRIPT_NAME' state=absent" &> /dev/null
- ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf insertafter='^ include uwsgi_params' line=' uwsgi_param SCRIPT_NAME ${PROXY_PREFIX};'" &> /dev/null
- fi
-
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}/reports" &> /dev/null
- ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=app:main option=filter-with value=proxy-prefix" &> /dev/null
-
- # Fix path to html assets
- ansible localhost -m replace -a "dest=$GALAXY_CONFIG_DIR/web/welcome.html regexp='(href=\"|\')[/\\w]*(/static)' replace='\\1${PROXY_PREFIX}\\2'" &> /dev/null
-
- # Set some other vars based on that prefix
- if [ "x$GALAXY_CONFIG_COOKIE_PATH" == "x" ]
- then
- export GALAXY_CONFIG_COOKIE_PATH="$PROXY_PREFIX"
- fi
- if [ "x$GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX" == "x" ]
- then
- export GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX="$PROXY_PREFIX/gie_proxy"
- fi
-
- # Change the defaults nginx upload/x-accel paths
- if [ "$GALAXY_CONFIG_NGINX_UPLOAD_PATH" == "/_upload" ]
- then
- export GALAXY_CONFIG_NGINX_UPLOAD_PATH="${PROXY_PREFIX}${GALAXY_CONFIG_NGINX_UPLOAD_PATH}"
- fi
-fi
-
-# Disable authentication of Galaxy reports
-if [[ ! -z $DISABLE_REPORTS_AUTH ]]
- then
- # disable authentification
- echo "Disable Galaxy reports authentification "
- echo "" > /etc/nginx/conf.d/reports_auth.conf
- else
- # enable authentification
- echo "Enable Galaxy reports authentification "
- cp /etc/nginx/conf.d/reports_auth.conf.source /etc/nginx/conf.d/reports_auth.conf
-fi
-
-# Try to guess if we are running under --privileged mode
-if [[ ! -z $HOST_DOCKER_LEGACY ]]; then
- if mount | grep "/proc/kcore"; then
- PRIVILEGED=false
- else
- PRIVILEGED=true
- fi
-else
- # Taken from http://stackoverflow.com/questions/32144575/how-to-know-if-a-docker-container-is-running-in-privileged-mode
- ip link add dummy0 type dummy 2>/dev/null
- if [[ $? -eq 0 ]]; then
- PRIVILEGED=true
- # clean the dummy0 link
- ip link delete dummy0 2>/dev/null
- else
- PRIVILEGED=false
- fi
-fi
-
-cd $GALAXY_ROOT
-. $GALAXY_VIRTUAL_ENV/bin/activate
-
-if $PRIVILEGED; then
- umount /var/lib/docker
-fi
-
-if [[ ! -z $STARTUP_EXPORT_USER_FILES ]]; then
- # If /export/ is mounted, export_user_files file moving all data to /export/
- # symlinks will point from the original location to the new path under /export/
- # If /export/ is not given, nothing will happen in that step
- echo "Checking /export..."
- python3 /usr/local/bin/export_user_files.py $PG_DATA_DIR_DEFAULT
-fi
-
-# Delete compiled templates in case they are out of date
-if [[ ! -z $GALAXY_CONFIG_TEMPLATE_CACHE_PATH ]]; then
- rm -rf $GALAXY_CONFIG_TEMPLATE_CACHE_PATH/*
-fi
-
-# Enable loading of dependencies on startup. Such as LDAP.
-# Adapted from galaxyproject/galaxy/scripts/common_startup.sh
-if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]]
- then
- echo "Installing optional dependencies in galaxy virtual environment..."
- : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
- GALAXY_CONDITIONAL_DEPENDENCIES=$(PYTHONPATH=lib python -c "import galaxy.dependencies; print('\n'.join(galaxy.dependencies.optional('$GALAXY_CONFIG_FILE')))")
- [ -z "$GALAXY_CONDITIONAL_DEPENDENCIES" ] || echo "$GALAXY_CONDITIONAL_DEPENDENCIES" | pip install -q -r /dev/stdin --index-url "${GALAXY_WHEELS_INDEX_URL}"
-fi
-
-if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]] && [[ ! -z $LOAD_PYTHON_DEV_DEPENDENCIES ]]
- then
- echo "Installing development requirements in galaxy virtual environment..."
- : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
- dev_requirements='./lib/galaxy/dependencies/dev-requirements.txt'
- [ -f $dev_requirements ] && pip install -q -r $dev_requirements --index-url "${GALAXY_WHEELS_INDEX_URL}"
-fi
-
-# Enable Test Tool Shed
-if [[ ! -z $ENABLE_TTS_INSTALL ]]
- then
- echo "Enable installation from the Test Tool Shed."
- export GALAXY_CONFIG_TOOL_SHEDS_CONFIG_FILE=$GALAXY_HOME/tool_sheds_conf.xml
-fi
-
-# Remove all default tools from Galaxy by default
-if [[ ! -z $BARE ]]
- then
- echo "Remove all tools from the tool_conf.xml file."
- export GALAXY_CONFIG_TOOL_CONFIG_FILE=config/shed_tool_conf.xml,$GALAXY_ROOT/test/functional/tools/upload_tool_conf.xml
-fi
-
-# If auto installing conda envs, make sure bcftools is installed for __set_metadata__ tool
-if [[ ! -z $GALAXY_CONFIG_CONDA_AUTO_INSTALL ]]
- then
- if [ ! -d "/tool_deps/_conda/envs/__bcftools@1.5" ]; then
- su $GALAXY_USER -c "/tool_deps/_conda/bin/conda create -y --override-channels --channel iuc --channel conda-forge --channel bioconda --channel defaults --name __bcftools@1.5 bcftools=1.5"
- su $GALAXY_USER -c "/tool_deps/_conda/bin/conda clean --tarballs --yes"
- fi
-fi
-
-if [[ ! -z $GALAXY_EXTRAS_CONFIG_POSTGRES ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- # Backward compatibility for exported postgresql directories before version 15.08.
- # In previous versions postgres has the UID/GID of 102/106. We changed this in
- # https://github.com/bgruening/docker-galaxy-stable/pull/71 to GALAXY_POSTGRES_UID=1550 and
- # GALAXY_POSTGRES_GID=1550
- if [ -e /export/postgresql/ ];
- then
- if [ `stat -c %g /export/postgresql/` == "106" ];
- then
- chown -R postgres:postgres /export/postgresql/
- fi
- fi
- fi
-fi
-
-
-if [[ ! -z $GALAXY_EXTRAS_CONFIG_CONDOR ]]; then
- if [[ ! -z $ENABLE_CONDOR ]]
- then
- if [[ ! -z $CONDOR_HOST ]]
- then
- echo "Enabling Condor with external scheduler at $CONDOR_HOST"
- echo "# Config generated by startup.sh
-CONDOR_HOST = $CONDOR_HOST
-ALLOW_ADMINISTRATOR = *
-ALLOW_OWNER = *
-ALLOW_READ = *
-ALLOW_WRITE = *
-ALLOW_CLIENT = *
-ALLOW_NEGOTIATOR = *
-DAEMON_LIST = MASTER, SCHEDD
-UID_DOMAIN = galaxy
-DISCARD_SESSION_KEYRING_ON_STARTUP = False
-TRUST_UID_DOMAIN = true" > /etc/condor/condor_config.local
- fi
-
- if [[ -e /export/condor_config ]]
- then
- echo "Replacing Condor config by locally supplied config from /export/condor_config"
- rm -f /etc/condor/condor_config
- ln -s /export/condor_config /etc/condor/condor_config
- fi
- fi
-fi
-
-
-# Copy or link the slurm/munge config files
-if [ -e /export/slurm.conf ]
-then
- rm -f /etc/slurm-llnl/slurm.conf
- ln -s /export/slurm.conf /etc/slurm-llnl/slurm.conf
-else
- # Configure SLURM with runtime hostname.
- # Use absolute path to python so virtualenv is not used.
- /usr/bin/python /usr/sbin/configure_slurm.py
-fi
-if [ -e /export/munge.key ]
-then
- rm -f /etc/munge/munge.key
- ln -s /export/munge.key /etc/munge/munge.key
- chmod 400 /export/munge.key
-fi
-
-# link the gridengine config file
-if [ -e /export/act_qmaster ]
-then
- rm -f /var/lib/gridengine/default/common/act_qmaster
- ln -s /export/act_qmaster /var/lib/gridengine/default/common/act_qmaster
-fi
-
-# Waits until postgres is ready
-function wait_for_postgres {
- echo "Checking if database is up and running"
- until /usr/local/bin/check_database.py 2>&1 >/dev/null; do sleep 1; echo "Waiting for database"; done
- echo "Database connected"
-}
-
-# $NONUSE can be set to include cron, proftp, reports or nodejs
-# if included we will _not_ start these services.
-function start_supervisor {
- supervisord -c /etc/supervisor/supervisord.conf
- sleep 5
-
- if [[ ! -z $SUPERVISOR_MANAGE_POSTGRES && ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- echo "Starting postgres"
- supervisorctl start postgresql
- fi
- fi
-
- wait_for_postgres
-
- # Make sure the database is automatically updated
- if [[ ! -z $GALAXY_AUTO_UPDATE_DB ]]
- then
- echo "Updating Galaxy database"
- sh manage_db.sh -c /etc/galaxy/galaxy.yml upgrade
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_CRON ]]; then
- if [[ $NONUSE != *"cron"* ]]
- then
- echo "Starting cron"
- supervisorctl start cron
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_PROFTP ]]; then
- if [[ $NONUSE != *"proftp"* ]]
- then
- echo "Starting ProFTP"
- supervisorctl start proftpd
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_REPORTS ]]; then
- if [[ $NONUSE != *"reports"* ]]
- then
- echo "Starting Galaxy reports webapp"
- supervisorctl start reports
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_IE_PROXY ]]; then
- if [[ $NONUSE != *"nodejs"* ]]
- then
- echo "Starting nodejs"
- supervisorctl start galaxy:galaxy_nodejs_proxy
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_CONDOR ]]; then
- if [[ $NONUSE != *"condor"* ]]
- then
- echo "Starting condor"
- supervisorctl start condor
- fi
- fi
-
- if [[ ! -z $SUPERVISOR_MANAGE_SLURM ]]; then
- if [[ $NONUSE != *"slurmctld"* ]]
- then
- echo "Starting slurmctld"
- supervisorctl start slurmctld
- fi
- if [[ $NONUSE != *"slurmd"* ]]
- then
- echo "Starting slurmd"
- supervisorctl start slurmd
- fi
- supervisorctl start munge
- else
- if [[ $NONUSE != *"slurmctld"* ]]
- then
- echo "Starting slurmctld"
- /usr/sbin/slurmctld -L $GALAXY_LOGS_DIR/slurmctld.log
- fi
- if [[ $NONUSE != *"slurmd"* ]]
- then
- echo "Starting slurmd"
- /usr/sbin/slurmd -L $GALAXY_LOGS_DIR/slurmd.log
- fi
-
- # We need to run munged regardless
- mkdir -p /var/run/munge && /usr/sbin/munged -f
- fi
-}
-
-if [[ ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
- if [[ $NONUSE != *"postgres"* ]]
- then
- # Change the data_directory of postgresql in the main config file
- ansible localhost -m lineinfile -a "line='data_directory = \'$PG_DATA_DIR_HOST\'' dest=$PG_CONF_DIR_DEFAULT/postgresql.conf backup=yes state=present regexp='data_directory'" &> /dev/null
- fi
-fi
-
-if $PRIVILEGED; then
- echo "Enable Galaxy Interactive Environments."
- export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY="config/plugins/interactive_environments"
- if [ x$DOCKER_PARENT == "x" ]; then
- #build the docker in docker environment
- bash /root/cgroupfs_mount.sh
- start_supervisor
- supervisorctl start docker
- else
- #inheriting /var/run/docker.sock from parent, assume that you need to
- #run docker with sudo to validate
- echo "$GALAXY_USER ALL = NOPASSWD : ALL" >> /etc/sudoers
- start_supervisor
- fi
- if [[ ! -z $PULL_IE_IMAGES ]]; then
- echo "About to pull IE images. Depending on the size, this may take a while!"
-
- for ie in {JUPYTER,RSTUDIO,ETHERCALC,PHINCH,NEO}; do
- enabled_var_name="GALAXY_EXTRAS_IE_FETCH_${ie}";
- if [[ ${!enabled_var_name} ]]; then
- # Store name in a var
- image_var_name="GALAXY_EXTRAS_${ie}_IMAGE"
- # And then read from that var
- docker pull "${!image_var_name}"
- fi
- done
- fi
-
- # in privileged mode autofs and CVMFS is available
- # install autofs
- echo "Installing autofs to enable automatic CVMFS mounts"
- apt-get install autofs --no-install-recommends -y
- apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/*
-else
- echo "Disable Galaxy Interactive Environments. Start with --privileged to enable IE's."
- export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY=""
- start_supervisor
-fi
-
-if [ "$USE_HTTPS_LETSENCRYPT" != "False" ]
-then
- echo "Settting up letsencrypt"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=letsencrypt \
- --extra-vars galaxy_extras_galaxy_domain="GALAXY_CONFIG_GALAXY_INFRASTRUCTURE_URL" \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
-fi
-if [ "$USE_HTTPS" != "False" ]
-then
- if [ -f /export/server.key -a -f /export/server.crt ]
- then
- echo "Copying SSL keys"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=own \
- --extra-vars src_nginx_ssl_certificate_key=/export/server.key \
- --extra-vars src_nginx_ssl_certificate=/export/server.crt \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
- else
- echo "Setting up self-signed SSL keys"
- ansible-playbook -c local /ansible/provision.yml \
- --extra-vars gather_facts=False \
- --extra-vars galaxy_extras_config_ssl=True \
- --extra-vars galaxy_extras_config_ssl_method=self-signed \
- --extra-vars galaxy_extras_config_nginx_upload=False \
- --tags https
- fi
-fi
-
-# In case the user wants the default admin to be created, do so.
-if [[ ! -z $GALAXY_DEFAULT_ADMIN_USER ]]
- then
- echo "Creating admin user $GALAXY_DEFAULT_ADMIN_USER with key $GALAXY_DEFAULT_ADMIN_KEY and password $GALAXY_DEFAULT_ADMIN_PASSWORD if not existing"
- python /usr/local/bin/create_galaxy_user.py --user "$GALAXY_DEFAULT_ADMIN_EMAIL" --password "$GALAXY_DEFAULT_ADMIN_PASSWORD" \
- -c "$GALAXY_CONFIG_FILE" --username "$GALAXY_DEFAULT_ADMIN_USER" --key "$GALAXY_DEFAULT_ADMIN_KEY"
-fi
-# If there is a need to execute actions that would require a live galaxy instance, such as adding workflows, setting quotas, adding more users, etc.
-# then place a file with that logic named post-start-actions.sh on the /export/ directory, it should have access to all environment variables
-# visible here.
-# The file needs to be executable (chmod a+x post-start-actions.sh)
-# uses ephemeris, present in docker-galaxy-stable, to wait for the local instance
-
-if [[ -f /export/post-start-actions.sh ]]
- then
- /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > $GALAXY_LOGS_DIR/post-start-actions.log
- /export/post-start-actions.sh >> $GALAXY_LOGS_DIR/post-start-actions.log &
- else
- echo "No /export/post-start-actions.sh found or not executable so not running" >> $GALAXY_LOGS_DIR/post-start-actions.log
-fi
-
-
-# Reinstall tools if the user want to
-if [[ ! -z $GALAXY_AUTO_UPDATE_TOOLS ]]
- then
- /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > /home/galaxy/logs/post-start-actions.log &&
- OLDIFS=$IFS
- IFS=','
- for TOOL_YML in `echo "$GALAXY_AUTO_UPDATE_TOOLS"`
- do
- echo "Installing tools from $TOOL_YML"
- /tool_deps/_conda/bin/shed-tools install -g "http://127.0.0.1" -a "$GALAXY_DEFAULT_ADMIN_KEY" -t "$TOOL_YML"
- /tool_deps/_conda/bin/conda clean --tarballs --yes
- done
- IFS=$OLDIFS
-fi
-
-# migrate custom IEs or Visualisations (Galaxy plugins)
-# this is needed for by the new client build system
-python3 ${GALAXY_ROOT}/scripts/plugin_staging.py
-
-# Enable verbose output
-if [ `echo ${GALAXY_LOGGING:-'no'} | tr [:upper:] [:lower:]` = "full" ]
- then
- tail -f /var/log/supervisor/* /var/log/nginx/* $GALAXY_LOGS_DIR/*.log
- else
- tail -f $GALAXY_LOGS_DIR/*.log
-fi
diff -r b938475235e3 -r e7e9732ebed6 html_dir.py
--- a/html_dir.py Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,180 +0,0 @@
-
-class tooloutHTMLifyer(self):
-
- def compressPDF(self,inpdf=None,thumbformat='png'):
- """need absolute path to pdf
- note that GS gets confoozled if no $TMP or $TEMP
- so we set it
- """
- assert os.path.isfile(inpdf), "## Input %s supplied to %s compressPDF not found" % (inpdf,self.myName)
- hlog = os.path.join(self.opts.output_dir,"compress_%s.txt" % os.path.basename(inpdf))
- sto = open(hlog,'a')
- our_env = os.environ.copy()
- our_tmp = our_env.get('TMP',None)
- if not our_tmp:
- our_tmp = our_env.get('TEMP',None)
- if not (our_tmp and os.path.exists(our_tmp)):
- newtmp = os.path.join(self.opts.output_dir,'tmp')
- try:
- os.mkdir(newtmp)
- except:
- sto.write('## WARNING - cannot make %s - it may exist or permissions need fixing\n' % newtmp)
- our_env['TEMP'] = newtmp
- if not self.temp_warned:
- sto.write('## WARNING - no $TMP or $TEMP!!! Please fix - using %s temporarily\n' % newtmp)
- self.temp_warned = True
- outpdf = '%s_compressed' % inpdf
- cl = ["gs", "-sDEVICE=pdfwrite", "-dNOPAUSE", "-dUseCIEColor", "-dBATCH","-dPDFSETTINGS=/printer", "-sOutputFile=%s" % outpdf,inpdf]
- x = subprocess.Popen(cl,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
- retval1 = x.wait()
- sto.close()
- if retval1 == 0:
- os.unlink(inpdf)
- shutil.move(outpdf,inpdf)
- os.unlink(hlog)
- hlog = os.path.join(self.opts.output_dir,"thumbnail_%s.txt" % os.path.basename(inpdf))
- sto = open(hlog,'w')
- outpng = '%s.%s' % (os.path.splitext(inpdf)[0],thumbformat)
- if self.useGM:
- cl2 = ['gm', 'convert', inpdf, outpng]
- else: # assume imagemagick
- cl2 = ['convert', inpdf, outpng]
- x = subprocess.Popen(cl2,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
- retval2 = x.wait()
- sto.close()
- if retval2 == 0:
- os.unlink(hlog)
- retval = retval1 or retval2
- return retval
-
-
- def getfSize(self,fpath,outpath):
- """
- format a nice file size string
- """
- size = ''
- fp = os.path.join(outpath,fpath)
- if os.path.isfile(fp):
- size = '0 B'
- n = float(os.path.getsize(fp))
- if n > 2**20:
- size = '%1.1f MB' % (n/2**20)
- elif n > 2**10:
- size = '%1.1f KB' % (n/2**10)
- elif n > 0:
- size = '%d B' % (int(n))
- return size
-
- def makeHtml(self):
- """ Create an HTML file content to list all the artifacts found in the output_dir
- """
-
- galhtmlprefix = """
-
-
-
-
-
-
-
-
- """
- galhtmlattr = """
"""
- galhtmlpostfix = """
\n"""
-
- flist = os.listdir(self.opts.output_dir)
- flist = [x for x in flist if x != 'Rplots.pdf']
- flist.sort()
- html = []
- html.append(galhtmlprefix % progname)
- html.append('Galaxy Tool "%s" run at %s
' % (self.toolname,timenow()))
- fhtml = []
- if len(flist) > 0:
- logfiles = [x for x in flist if x.lower().endswith('.log')] # log file names determine sections
- logfiles.sort()
- logfiles = [x for x in logfiles if os.path.abspath(x) != os.path.abspath(self.tlog)]
- logfiles.append(os.path.abspath(self.tlog)) # make it the last one
- pdflist = []
- npdf = len([x for x in flist if os.path.splitext(x)[-1].lower() == '.pdf'])
- for rownum,fname in enumerate(flist):
- dname,e = os.path.splitext(fname)
- sfsize = self.getfSize(fname,self.opts.output_dir)
- if e.lower() == '.pdf' : # compress and make a thumbnail
- thumb = '%s.%s' % (dname,self.thumbformat)
- pdff = os.path.join(self.opts.output_dir,fname)
- retval = self.compressPDF(inpdf=pdff,thumbformat=self.thumbformat)
- if retval == 0:
- pdflist.append((fname,thumb))
- else:
- pdflist.append((fname,fname))
- if (rownum+1) % 2 == 0:
- fhtml.append('%s %s ' % (fname,fname,sfsize))
- else:
- fhtml.append('%s %s ' % (fname,fname,sfsize))
- for logfname in logfiles: # expect at least tlog - if more
- if os.path.abspath(logfname) == os.path.abspath(self.tlog): # handled later
- sectionname = 'All tool run'
- if (len(logfiles) > 1):
- sectionname = 'Other'
- ourpdfs = pdflist
- else:
- realname = os.path.basename(logfname)
- sectionname = os.path.splitext(realname)[0].split('_')[0] # break in case _ added to log
- ourpdfs = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] == sectionname]
- pdflist = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] != sectionname] # remove
- nacross = 1
- npdf = len(ourpdfs)
-
- if npdf > 0:
- nacross = math.sqrt(npdf) ## int(round(math.log(npdf,2)))
- if int(nacross)**2 != npdf:
- nacross += 1
- nacross = int(nacross)
- width = min(400,int(1200/nacross))
- html.append('%s images and outputs
' % sectionname)
- html.append('(Click on a thumbnail image to download the corresponding original PDF image) ')
- ntogo = nacross # counter for table row padding with empty cells
- html.append('\n')
- for i,paths in enumerate(ourpdfs):
- fname,thumb = paths
- s= """ \n""" % (fname,thumb,fname,width,fname)
- if ((i+1) % nacross == 0):
- s += ' \n'
- ntogo = 0
- if i < (npdf - 1): # more to come
- s += ''
- ntogo = nacross
- else:
- ntogo -= 1
- html.append(s)
- if html[-1].strip().endswith(' '):
- html.append('
\n')
- else:
- if ntogo > 0: # pad
- html.append(' '*ntogo)
- html.append('\n')
- logt = open(logfname,'r').readlines()
- logtext = [x for x in logt if x.strip() > '']
- html.append('%s log output
' % sectionname)
- if len(logtext) > 1:
- html.append('\n\n')
- html += logtext
- html.append('\n \n')
- else:
- html.append('%s is empty ' % logfname)
- if len(fhtml) > 0:
- fhtml.insert(0,'Output File Name (click to view) Size \n')
- fhtml.append('
')
- html.append('All output files available for downloading
\n')
- html += fhtml # add all non-pdf files to the end of the display
- else:
- html.append('### Error - %s returned no files - please confirm that parameters are sane
' % self.opts.interpreter)
- html.append(galhtmlpostfix)
- htmlf = file(self.opts.output_html,'w')
- htmlf.write('\n'.join(html))
- htmlf.write('\n')
- htmlf.close()
- self.html = html
-
-
diff -r b938475235e3 -r e7e9732ebed6 images/dynamicScriptTool.png
Binary file images/dynamicScriptTool.png has changed
diff -r b938475235e3 -r e7e9732ebed6 rgToolFactory2.py
--- a/rgToolFactory2.py Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,746 +0,0 @@
-#!/usr/bin/env python
-# rgToolFactory.py
-# see https://github.com/fubar2/toolfactory
-#
-# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012
-#
-# all rights reserved
-# Licensed under the LGPL
-# suggestions for improvement and bug fixes welcome at https://github.com/fubar2/toolfactory
-#
-# July 2020: BCC was fun and I feel like rip van winkle after 5 years.
-# Decided to
-# 1. Fix the toolfactory so it works - done for simplest case
-# 2. Fix planemo so the toolfactory function works
-# 3. Rewrite bits using galaxyxml functions where that makes sense - done
-#
-# removed all the old complications including making the new tool use this same script
-# galaxyxml now generates the tool xml https://github.com/hexylena/galaxyxml
-# No support for automatic HTML file creation from arbitrary outputs
-# essential problem is to create two command lines - one for the tool xml and a different
-# one to run the executable with the supplied test data and settings
-# Be simpler to write the tool, then run it with planemo and soak up the test outputs.
-
-
-
-import argparse
-import logging
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-import time
-
-import galaxyxml.tool as gxt
-import galaxyxml.tool.parameters as gxtp
-
-import lxml
-
-myversion = "V2.1 July 2020"
-verbose = True
-debug = True
-toolFactoryURL = "https://github.com/fubar2/toolfactory"
-ourdelim = "~~~"
-
-# --input_files="$input_files~~~$CL~~~$input_formats~~~$input_label
-# ~~~$input_help"
-IPATHPOS = 0
-ICLPOS = 1
-IFMTPOS = 2
-ILABPOS = 3
-IHELPOS = 4
-IOCLPOS = 5
-
-# --output_files "$otab.history_name~~~$otab.history_format~~~$otab.CL
-ONAMEPOS = 0
-OFMTPOS = 1
-OCLPOS = 2
-OOCLPOS = 3
-
-# --additional_parameters="$i.param_name~~~$i.param_value~~~
-# $i.param_label~~~$i.param_help~~~$i.param_type~~~$i.CL~~~i$.param_CLoverride"
-ANAMEPOS = 0
-AVALPOS = 1
-ALABPOS = 2
-AHELPPOS = 3
-ATYPEPOS = 4
-ACLPOS = 5
-AOVERPOS = 6
-AOCLPOS = 7
-
-
-foo = len(lxml.__version__)
-# fug you, flake8. Say my name!
-
-def timenow():
- """return current time as a string
- """
- return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time()))
-
-
-def quote_non_numeric(s):
- """return a prequoted string for non-numerics
- useful for perl and Rscript parameter passing?
- """
- try:
- _ = float(s)
- return s
- except ValueError:
- return '"%s"' % s
-
-
-html_escape_table = {"&": "&", ">": ">", "<": "<", "$": r"\$"}
-
-
-def html_escape(text):
- """Produce entities within text."""
- return "".join(html_escape_table.get(c, c) for c in text)
-
-
-def html_unescape(text):
- """Revert entities within text. Multiple character targets so use replace"""
- t = text.replace("&", "&")
- t = t.replace(">", ">")
- t = t.replace("<", "<")
- t = t.replace("\\$", "$")
- return t
-
-
-def parse_citations(citations_text):
- """
- """
- citations = [c for c in citations_text.split("**ENTRY**") if c.strip()]
- citation_tuples = []
- for citation in citations:
- if citation.startswith("doi"):
- citation_tuples.append(("doi", citation[len("doi") :].strip()))
- else:
- citation_tuples.append(
- ("bibtex", citation[len("bibtex") :].strip())
- )
- return citation_tuples
-
-
-class ScriptRunner:
- """Wrapper for an arbitrary script
- uses galaxyxml
-
- """
-
- def __init__(self, args=None):
- """
- prepare command line cl for running the tool here
- and prepare elements needed for galaxyxml tool generation
- """
-
- self.infiles = [x.split(ourdelim) for x in args.input_files]
- self.outfiles = [x.split(ourdelim) for x in args.output_files]
- self.addpar = [x.split(ourdelim) for x in args.additional_parameters]
- self.args = args
- self.cleanuppar()
- self.lastclredirect = None
- self.lastxclredirect = None
- self.cl = []
- self.xmlcl = []
- self.is_positional = self.args.parampass == "positional"
- aCL = self.cl.append
- assert args.parampass in [
- "0",
- "argparse",
- "positional",
- ], 'Parameter passing in args.parampass must be "0","positional" or "argparse"'
- self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name)
- self.tool_id = self.tool_name
- if self.args.interpreter_name:
- exe = "$runMe"
- else:
- exe = self.args.exe_package
- assert (
- exe is not None
- ), "No interpeter or executable passed in - nothing to run so cannot build"
- self.tool = gxt.Tool(
- self.args.tool_name,
- self.tool_id,
- self.args.tool_version,
- self.args.tool_desc,
- exe,
- )
- self.tinputs = gxtp.Inputs()
- self.toutputs = gxtp.Outputs()
- self.testparam = []
- if (
- self.args.runmode == "Executable" or self.args.runmode == "system"
- ): # binary - no need
- aCL(self.args.exe_package) # this little CL will just run
- else:
- self.prepScript()
- self.elog = "%s_error_log.txt" % self.tool_name
- self.tlog = "%s_runner_log.txt" % self.tool_name
-
- if self.args.parampass == "0":
- self.clsimple()
- else:
- clsuffix = []
- xclsuffix = []
- for i, p in enumerate(self.infiles):
- if p[IOCLPOS] == "STDIN":
- appendme = [
- p[IOCLPOS],
- p[ICLPOS],
- p[IPATHPOS],
- "< %s" % p[IPATHPOS],
- ]
- xappendme = [
- p[IOCLPOS],
- p[ICLPOS],
- p[IPATHPOS],
- "< $%s" % p[ICLPOS],
- ]
- else:
- appendme = [p[IOCLPOS], p[ICLPOS], p[IPATHPOS], ""]
- xappendme = [p[IOCLPOS], p[ICLPOS], "$%s" % p[ICLPOS], ""]
- clsuffix.append(appendme)
- xclsuffix.append(xappendme)
- # print('##infile i=%d, appendme=%s' % (i,appendme))
- for i, p in enumerate(self.outfiles):
- if p[OOCLPOS] == "STDOUT":
- self.lastclredirect = [">", p[ONAMEPOS]]
- self.lastxclredirect = [">", "$%s" % p[OCLPOS]]
- else:
- clsuffix.append([p[OOCLPOS], p[OCLPOS], p[ONAMEPOS], ""])
- xclsuffix.append(
- [p[OOCLPOS], p[OCLPOS], "$%s" % p[ONAMEPOS], ""]
- )
- for p in self.addpar:
- clsuffix.append(
- [p[AOCLPOS], p[ACLPOS], p[AVALPOS], p[AOVERPOS]]
- )
- xclsuffix.append(
- [p[AOCLPOS], p[ACLPOS], '"$%s"' % p[ANAMEPOS], p[AOVERPOS]]
- )
- clsuffix.sort()
- xclsuffix.sort()
- self.xclsuffix = xclsuffix
- self.clsuffix = clsuffix
- if self.args.parampass == "positional":
- self.clpositional()
- else:
- self.clargparse()
-
- def prepScript(self):
- aCL = self.cl.append
- rx = open(self.args.script_path, "r").readlines()
- rx = [x.rstrip() for x in rx]
- rxcheck = [x.strip() for x in rx if x.strip() > ""]
- assert len(rxcheck) > 0, "Supplied script is empty. Cannot run"
- self.script = "\n".join(rx)
- fhandle, self.sfile = tempfile.mkstemp(
- prefix=self.tool_name, suffix="_%s" % (self.args.interpreter_name)
- )
- tscript = open(self.sfile, "w")
- tscript.write(self.script)
- tscript.close()
- self.indentedScript = " %s" % "\n".join(
- [" %s" % html_escape(x) for x in rx]
- )
- self.escapedScript = "%s" % "\n".join(
- [" %s" % html_escape(x) for x in rx]
- )
- art = "%s.%s" % (self.tool_name, self.args.interpreter_name)
- artifact = open(art, "wb")
- if self.args.interpreter_name == "python":
- artifact.write(bytes("#!/usr/bin/env python\n", "utf8"))
- artifact.write(bytes(self.script, "utf8"))
- artifact.close()
- aCL(self.args.interpreter_name)
- aCL(self.sfile)
-
- def cleanuppar(self):
- """ positional parameters are complicated by their numeric ordinal"""
- for i, p in enumerate(self.infiles):
- if self.args.parampass == "positional":
- assert p[ICLPOS].isdigit(), (
- "Positional parameters must be ordinal integers - got %s for %s"
- % (p[ICLPOS], p[ILABPOS])
- )
- p.append(p[ICLPOS])
- if p[ICLPOS].isdigit() or self.args.parampass == "0":
- scl = "input%d" % (i + 1)
- p[ICLPOS] = scl
- self.infiles[i] = p
- for i, p in enumerate(
- self.outfiles
- ): # trying to automagically gather using extensions
- if self.args.parampass == "positional" and p[OCLPOS] != "STDOUT":
- assert p[OCLPOS].isdigit(), (
- "Positional parameters must be ordinal integers - got %s for %s"
- % (p[OCLPOS], p[ONAMEPOS])
- )
- p.append(p[OCLPOS])
- if p[OCLPOS].isdigit() or p[OCLPOS] == "STDOUT":
- scl = p[ONAMEPOS]
- p[OCLPOS] = scl
- self.outfiles[i] = p
- for i, p in enumerate(self.addpar):
- if self.args.parampass == "positional":
- assert p[ACLPOS].isdigit(), (
- "Positional parameters must be ordinal integers - got %s for %s"
- % (p[ACLPOS], p[ANAMEPOS])
- )
- p.append(p[ACLPOS])
- if p[ACLPOS].isdigit():
- scl = "input%s" % p[ACLPOS]
- p[ACLPOS] = scl
- self.addpar[i] = p
-
- def clsimple(self):
- """ no parameters - uses < and > for i/o
- """
- aCL = self.cl.append
- aCL("<")
- aCL(self.infiles[0][IPATHPOS])
- aCL(">")
- aCL(self.outfiles[0][OCLPOS])
- aXCL = self.xmlcl.append
- aXCL("<")
- aXCL("$%s" % self.infiles[0][ICLPOS])
- aXCL(">")
- aXCL("$%s" % self.outfiles[0][ONAMEPOS])
-
- def clpositional(self):
- # inputs in order then params
- aCL = self.cl.append
- for (o_v, k, v, koverride) in self.clsuffix:
- if " " in v:
- aCL("%s" % v)
- else:
- aCL(v)
- aXCL = self.xmlcl.append
- for (o_v, k, v, koverride) in self.xclsuffix:
- aXCL(v)
- if self.lastxclredirect:
- aXCL(self.lastxclredirect[0])
- aXCL(self.lastxclredirect[1])
-
- def clargparse(self):
- """ argparse style
- """
- aCL = self.cl.append
- aXCL = self.xmlcl.append
- # inputs then params in argparse named form
- for (o_v, k, v, koverride) in self.xclsuffix:
- if koverride > "":
- k = koverride
- elif len(k.strip()) == 1:
- k = "-%s" % k
- else:
- k = "--%s" % k
- aXCL(k)
- aXCL(v)
- for (o_v, k, v, koverride) in self.clsuffix:
- if koverride > "":
- k = koverride
- elif len(k.strip()) == 1:
- k = "-%s" % k
- else:
- k = "--%s" % k
- aCL(k)
- aCL(v)
-
- def getNdash(self, newname):
- if self.is_positional:
- ndash = 0
- else:
- ndash = 2
- if len(newname) < 2:
- ndash = 1
- return ndash
-
- def doXMLparam(self):
- """flake8 made me do this..."""
- for p in self.outfiles:
- newname, newfmt, newcl, oldcl = p
- ndash = self.getNdash(newcl)
- aparm = gxtp.OutputData(newcl, format=newfmt, num_dashes=ndash)
- aparm.positional = self.is_positional
- if self.is_positional:
- if oldcl == "STDOUT":
- aparm.positional = 9999999
- aparm.command_line_override = "> $%s" % newcl
- else:
- aparm.positional = int(oldcl)
- aparm.command_line_override = "$%s" % newcl
- self.toutputs.append(aparm)
- tp = gxtp.TestOutput(
- name=newcl, value="%s_sample" % newcl, format=newfmt
- )
- self.testparam.append(tp)
- for p in self.infiles:
- newname = p[ICLPOS]
- newfmt = p[IFMTPOS]
- ndash = self.getNdash(newname)
- if not len(p[ILABPOS]) > 0:
- alab = p[ICLPOS]
- else:
- alab = p[ILABPOS]
- aninput = gxtp.DataParam(
- newname,
- optional=False,
- label=alab,
- help=p[IHELPOS],
- format=newfmt,
- multiple=False,
- num_dashes=ndash,
- )
- aninput.positional = self.is_positional
- self.tinputs.append(aninput)
- tparm = gxtp.TestParam(name=newname, value="%s_sample" % newname)
- self.testparam.append(tparm)
- for p in self.addpar:
- newname, newval, newlabel, newhelp, newtype, newcl, override, oldcl = p
- if not len(newlabel) > 0:
- newlabel = newname
- ndash = self.getNdash(newname)
- if newtype == "text":
- aparm = gxtp.TextParam(
- newname,
- label=newlabel,
- help=newhelp,
- value=newval,
- num_dashes=ndash,
- )
- elif newtype == "integer":
- aparm = gxtp.IntegerParam(
- newname,
- label=newname,
- help=newhelp,
- value=newval,
- num_dashes=ndash,
- )
- elif newtype == "float":
- aparm = gxtp.FloatParam(
- newname,
- label=newname,
- help=newhelp,
- value=newval,
- num_dashes=ndash,
- )
- else:
- raise ValueError(
- 'Unrecognised parameter type "%s" for\
- additional parameter %s in makeXML'
- % (newtype, newname)
- )
- aparm.positional = self.is_positional
- if self.is_positional:
- aninput.positional = int(oldcl)
- self.tinputs.append(aparm)
- self.tparm = gxtp.TestParam(newname, value=newval)
- self.testparam.append(tparm)
-
- def doNoXMLparam(self):
- alab = self.infiles[0][ILABPOS]
- if len(alab) == 0:
- alab = self.infiles[0][ICLPOS]
- max1s = (
- "Maximum one input if parampass is 0 - more than one input files supplied - %s"
- % str(self.infiles)
- )
- assert len(self.infiles) == 1, max1s
- newname = self.infiles[0][ICLPOS]
- aninput = gxtp.DataParam(
- newname,
- optional=False,
- label=alab,
- help=self.infiles[0][IHELPOS],
- format=self.infiles[0][IFMTPOS],
- multiple=False,
- num_dashes=0,
- )
- aninput.command_line_override = "< $%s" % newname
- aninput.positional = self.is_positional
- self.tinputs.append(aninput)
- tp = gxtp.TestParam(name=newname, value="%s_sample" % newname)
- self.testparam.append(tp)
- newname = self.outfiles[0][OCLPOS]
- newfmt = self.outfiles[0][OFMTPOS]
- anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0)
- anout.command_line_override = "> $%s" % newname
- anout.positional = self.is_positional
- self.toutputs.append(anout)
- tp = gxtp.TestOutput(
- name=newname, value="%s_sample" % newname, format=newfmt
- )
- self.testparam.append(tp)
-
- def makeXML(self):
- """
- Create a Galaxy xml tool wrapper for the new script
- Uses galaxyhtml
- Hmmm. How to get the command line into correct order...
- """
- self.tool.command_line_override = self.xmlcl
- if self.args.interpreter_name:
- self.tool.interpreter = self.args.interpreter_name
- if self.args.help_text:
- helptext = open(self.args.help_text, "r").readlines()
- helptext = [html_escape(x) for x in helptext]
- self.tool.help = "".join([x for x in helptext])
- else:
- self.tool.help = (
- "Please ask the tool author (%s) for help \
- as none was supplied at tool generation\n"
- % (self.args.user_email)
- )
- self.tool.version_command = None # do not want
- requirements = gxtp.Requirements()
-
- if self.args.interpreter_name:
- if self.args.interpreter_name == "python":
- requirements.append(
- gxtp.Requirement(
- "package", "python", self.args.interpreter_version
- )
- )
- elif self.args.interpreter_name not in ["bash", "sh"]:
- requirements.append(
- gxtp.Requirement(
- "package",
- self.args.interpreter_name,
- self.args.interpreter_version,
- )
- )
- else:
- if self.args.exe_package and self.args.parampass != "system":
- requirements.append(
- gxtp.Requirement(
- "package",
- self.args.exe_package,
- self.args.exe_package_version,
- )
- )
- self.tool.requirements = requirements
- if self.args.parampass == "0":
- self.doNoXMLparam()
- else:
- self.doXMLparam()
- self.tool.outputs = self.toutputs
- self.tool.inputs = self.tinputs
- if self.args.runmode not in ["Executable", "system"]:
- configfiles = gxtp.Configfiles()
- configfiles.append(gxtp.Configfile(name="runMe", text=self.script))
- self.tool.configfiles = configfiles
- tests = gxtp.Tests()
- test_a = gxtp.Test()
- for tp in self.testparam:
- test_a.append(tp)
- tests.append(test_a)
- self.tool.tests = tests
- self.tool.add_comment(
- "Created by %s at %s using the Galaxy Tool Factory."
- % (self.args.user_email, timenow())
- )
- self.tool.add_comment("Source in git at: %s" % (toolFactoryURL))
- self.tool.add_comment(
- "Cite: Creating re-usable tools from scripts doi: \
- 10.1093/bioinformatics/bts573"
- )
- exml = self.tool.export()
- xf = open('%s.xml' % self.tool_name, "w")
- xf.write(exml)
- xf.write("\n")
- xf.close()
- # ready for the tarball
-
- def makeTooltar(self):
- """
- a tool is a gz tarball with eg
- /toolname/tool.xml /toolname/tool.py /toolname/test-data/test1_in.foo ...
- NOTE names for test inputs and outputs are munged here so must
- correspond to actual input and output names used on the generated cl
- """
- retval = self.run()
- if retval:
- sys.stderr.write(
- "## Run failed. Cannot build yet. Please fix and retry"
- )
- sys.exit(1)
- tdir = "tfout"
- if not os.path.exists(tdir):
- os.mkdir(tdir)
- self.makeXML()
- testdir = os.path.join(tdir, "test-data")
- if not os.path.exists(testdir):
- os.mkdir(testdir) # make tests directory
- for p in self.infiles:
- pth = p[IPATHPOS]
- dest = os.path.join(testdir, "%s_sample" % p[ICLPOS])
- shutil.copyfile(pth, dest)
- for p in self.outfiles:
- pth = p[OCLPOS]
- if p[OOCLPOS] == "STDOUT" or self.args.parampass == "0":
- pth = p[ONAMEPOS]
- dest = os.path.join(testdir, "%s_sample" % p[ONAMEPOS])
- shutil.copyfile(pth, dest)
- dest = os.path.join(tdir, p[ONAMEPOS])
- shutil.copyfile(pth, dest)
- else:
- pth = p[OCLPOS]
- dest = os.path.join(testdir, "%s_sample" % p[OCLPOS])
- shutil.copyfile(pth, dest)
- dest = os.path.join(tdir, p[OCLPOS])
- shutil.copyfile(pth, dest)
-
- if os.path.exists(self.tlog) and os.stat(self.tlog).st_size > 0:
- shutil.copyfile(self.tlog, os.path.join(testdir, "test1_log_outfiletxt"))
- if self.args.runmode not in ["Executable", "system"]:
- stname = os.path.join(tdir, "%s" % (self.sfile))
- if not os.path.exists(stname):
- shutil.copyfile(self.sfile, stname)
- xreal = '%s.xml' % self.tool_name
- xout = os.path.join(tdir,xreal)
- shutil.copyfile(xreal, xout)
- tarpath = "toolfactory_%s.tgz" % self.tool_name
- tf = tarfile.open(tarpath, "w:gz")
- tf.add(name=tdir, arcname=self.tool_name)
- tf.close()
- shutil.copyfile(tarpath, self.args.new_tool)
- shutil.copyfile(xreal,"tool_xml.txt")
- repdir = "TF_run_report_tempdir"
- if not os.path.exists(repdir):
- os.mkdir(repdir)
- repoutnames = [x[OCLPOS] for x in self.outfiles]
- with os.scandir('.') as outs:
- for entry in outs:
- if entry.name.endswith('.tgz') or not entry.is_file():
- continue
- if entry.name in repoutnames:
- shutil.copyfile(entry.name,os.path.join(repdir,entry.name))
- elif entry.name == "%s.xml" % self.tool_name:
- shutil.copyfile(entry.name,os.path.join(repdir,"new_tool_xml"))
- return retval
-
- def run(self):
- """
- Some devteam tools have this defensive stderr read so I'm keeping with the faith
- Feel free to update.
- """
- s = "run cl=%s" % str(self.cl)
-
- logging.debug(s)
- scl = " ".join(self.cl)
- err = None
- if self.args.parampass != "0":
- ste = open(self.elog, "wb")
- if self.lastclredirect:
- sto = open(
- self.lastclredirect[1], "wb"
- ) # is name of an output file
- else:
- sto = open(self.tlog, "wb")
- sto.write(
- bytes(
- "## Executing Toolfactory generated command line = %s\n"
- % scl,
- "utf8",
- )
- )
- sto.flush()
- p = subprocess.run(self.cl, shell=False, stdout=sto, stderr=ste)
- sto.close()
- ste.close()
- tmp_stderr = open(self.elog, "rb")
- err = ""
- buffsize = 1048576
- try:
- while True:
- err += str(tmp_stderr.read(buffsize))
- if not err or len(err) % buffsize != 0:
- break
- except OverflowError:
- pass
- tmp_stderr.close()
- retval = p.returncode
- else: # work around special case of simple scripts that take stdin and write to stdout
- sti = open(self.infiles[0][IPATHPOS], "rb")
- sto = open(self.outfiles[0][ONAMEPOS], "wb")
- # must use shell to redirect
- p = subprocess.run(self.cl, shell=False, stdout=sto, stdin=sti)
- retval = p.returncode
- sto.close()
- sti.close()
- if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0:
- os.unlink(self.tlog)
- if os.path.isfile(self.elog) and os.stat(self.elog).st_size == 0:
- os.unlink(self.elog)
- if p.returncode != 0 and err: # problem
- sys.stderr.write(err)
- logging.debug("run done")
- return retval
-
-
-def main():
- """
- This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml as:
- rgBaseScriptWrapper.py --script_path "$scriptPath" --tool_name "foo" --interpreter "Rscript"
-
- """
- parser = argparse.ArgumentParser()
- a = parser.add_argument
- a("--script_path", default="")
- a("--tool_name", default=None)
- a("--interpreter_name", default=None)
- a("--interpreter_version", default=None)
- a("--exe_package", default=None)
- a("--exe_package_version", default=None)
- a("--input_files", default=[], action="append")
- a("--output_files", default=[], action="append")
- a("--user_email", default="Unknown")
- a("--bad_user", default=None)
- a("--make_Tool", default=None)
- a("--help_text", default=None)
- a("--tool_desc", default=None)
- a("--tool_version", default=None)
- a("--citations", default=None)
- a("--additional_parameters", action="append", default=[])
- a("--edit_additional_parameters", action="store_true", default=False)
- a("--parampass", default="positional")
- a("--tfout", default="./tfout")
- a("--new_tool", default="new_tool")
- a("--runmode", default=None)
- args = parser.parse_args()
- assert not args.bad_user, (
- 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to "admin_users" in the Galaxy configuration file'
- % (args.bad_user, args.bad_user)
- )
- assert (
- args.tool_name
- ), "## Tool Factory expects a tool name - eg --tool_name=DESeq"
- assert (
- args.interpreter_name or args.exe_package
- ), "## Tool Factory wrapper expects an interpreter or an executable package"
- assert args.exe_package or (
- len(args.script_path) > 0 and os.path.isfile(args.script_path)
- ), "## Tool Factory wrapper expects a script path - eg --script_path=foo.R if no executable"
- args.input_files = [
- x.replace('"', "").replace("'", "") for x in args.input_files
- ]
- # remove quotes we need to deal with spaces in CL params
- for i, x in enumerate(args.additional_parameters):
- args.additional_parameters[i] = args.additional_parameters[i].replace(
- '"', ""
- )
- r = ScriptRunner(args)
- if args.make_Tool:
- retcode = r.makeTooltar()
- else:
- retcode = r.run()
- if retcode:
- sys.exit(retcode) # indicate failure to job runner
-
-
-if __name__ == "__main__":
- main()
diff -r b938475235e3 -r e7e9732ebed6 rgToolFactory2.xml
--- a/rgToolFactory2.xml Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,462 +0,0 @@
-
- Scripts into tools
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Yes, allow user to edit all additional parameters on the generated tool form
- No - use the fixed values for all additional parameters - no user editing
-
-
-
-
-
-
-
-
-
-
- text
- integer
- float
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- python
- galaxyxml
-
-
-
-
-
-#if $interexe.interpreter != "Executable" and $interexe.interpreter != "system" :
-${interexe.dynScript}
-#else:
-$tool_name
-#end if
-
-
- #if $makeMode.make_Tool == "yes":
-${makeMode.help_text}
- #else
-$tool_name help goes here
- #end if
-
-
-#if $makeMode.make_Tool == "yes":
- #for $citation in $makeMode.citations:
- #if $citation.citation_type.type == "bibtex":
- **ENTRY**bibtex
- ${citation.citation_type.bibtex}
- #else
- **ENTRY**doi
- ${citation.citation_type.doi}
- #end if
- #end for
-#end if
-
-
-
-
-
-
-
-
-
-
-
-
-
- An executable binary to be provided and managed by the Conda dependency management subsystem
- python
- Rscript
- perl
- bash
- sh
- A system executable found on the path such as awk/sed
- for testing only - do not use me please
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Argparse style: passed in the form of '--[clname] [value]'
- Positional: Passed in the order of positional ordinals '...foo.bam bar.idx zot.xls'
- No parameters needed because tool reads selected input file from STDIN and writes STDOUT with new history output"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- Generate a Galaxy ToolShed compatible toolshed.gz
- No. Just run the script please
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- DOI
- BibTeX
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- makeMode['make_Tool'] == "yes"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-.. class:: warningmark
-
-**Details and attribution**
-(see GTF_)
-
-**Local Admins ONLY**
-Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool.
-
-**If you find a bug**
-Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_
-
-**What it does**
-This tool optionally generates normal workflow compatible first class Galaxy tools
-
-Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form.
-Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user.
-Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example.
-
-Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime.
-These can be editable by the downstream user or baked in.
-
-When you run this tool, your executable or script and supplied parameter values will be run to produce a canonical
-set of outputs - these are used to construct a test for the new tool.
-
-If tool generation is required, a new tarball compatible with any Galaxy toolshed is created.
-It can be unpacked in your galaxy/tools directory and manually added to tool_conf.xml, or
-installed into any toolshed from where it can be installed into your Galaxy.
-
-
-.. class:: warningmark
-
-**Note to system administrators**
-This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personnal Galaxy instances.
-Admin_users will have the power to do anything they want as the Galaxy user if you install this tool.
-
-.. class:: warningmark
-
-**Use on public servers** is STRONGLY discouraged for obvious reasons
-
-The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation.
-We recommend that you follow the good code hygiene practices associated with safe toolshed practices.
-
-Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing:
-
-::
-
- # reverse order of text by row
- import sys
- inp = sys.argv[1]
- outp = sys.argv[2]
- i = open(inp,'r').readlines()
- o = open(outp,'w')
- for row in i:
- rs = row.rstrip()
- rs = list(rs)
- rs.reverse()
- o.write(''.join(rs))
- o.write('\n')
- o.close()
-
-With argparse style parameters:
-
-::
-
- # reverse order of text by row
- import argparse
- parser = argparse.ArgumentParser()
- a = parser.add_argument
- a('--infile',default='')
- a('--outfile',default=None)
- args = parser.parse_args()
- inp = args.infile
- outp = args.outfile
- i = open(inp,'r').readlines()
- o = open(outp,'w')
- for row in i:
- rs = row.rstrip()
- rs = list(rs)
- rs.reverse()
- o.write(''.join(rs))
- o.write('\n')
- o.close()
-
-
-Paper_ :
-
-Creating re-usable tools from scripts: The Galaxy Tool Factory
-Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
-Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
-
-**Licensing**
-
-Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012
-All rights reserved.
-Licensed under the LGPL_
-
-.. _LGPL: http://www.gnu.org/copyleft/lesser.html
-.. _GTF: https://github.com/fubar2/toolfactory
-.. _Paper: http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573
-
-
-
-
- 10.1093/bioinformatics/bts573
-
-
-
-
diff -r b938475235e3 -r e7e9732ebed6 test-data/input1_sample
--- a/test-data/input1_sample Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,166 +0,0 @@
-*WARNING before you start*
-
- Install this tool on a private Galaxy ONLY
- Please NEVER on a public or production instance
-
-Updated august 2014 by John Chilton adding citation support
-
-Updated august 8 2014 to fix bugs reported by Marius van den Beek
-
-Please cite the resource at
-http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
-if you use this tool in your published work.
-
-**Short Story**
-
-This is an unusual Galaxy tool capable of generating new Galaxy tools.
-It works by exposing *unrestricted* and therefore extremely dangerous scripting
-to all designated administrators of the host Galaxy server, allowing them to
-run scripts in R, python, sh and perl over multiple selected input data sets,
-writing a single new data set as output.
-
-*You have a working r/python/perl/bash script or any executable with positional or argparse style parameters*
-
-It can be turned into an ordinary Galaxy tool in minutes, using a Galaxy tool.
-
-
-**Automated generation of new Galaxy tools for installation into any Galaxy**
-
-A test is generated using small sample test data inputs and parameter settings you supply.
-Once the test case outputs have been produced, they can be used to build a
-new Galaxy tool. The supplied script or executable is baked as a requirement
-into a new, ordinary Galaxy tool, fully workflow compatible out of the box.
-Generated tools are installed via a tool shed by an administrator
-and work exactly like all other Galaxy tools for your users.
-
-**More Detail**
-
-To use the ToolFactory, you should have prepared a script to paste into a
-text box, or have a package in mind and a small test input example ready to select from your history
-to test your new script.
-
-```planemo test rgToolFactory2.xml --galaxy_root ~/galaxy --test_data ~/galaxy/tools/tool_makers/toolfactory/test-data``` works for me
-
-There is an example in each scripting language on the Tool Factory form. You
-can just cut and paste these to try it out - remember to select the right
-interpreter please. You'll also need to create a small test data set using
-the Galaxy history add new data tool.
-
-If the script fails somehow, use the "redo" button on the tool output in
-your history to recreate the form complete with broken script. Fix the bug
-and execute again. Rinse, wash, repeat.
-
-Once the script runs sucessfully, a new Galaxy tool that runs your script
-can be generated. Select the "generate" option and supply some help text and
-names. The new tool will be generated in the form of a new Galaxy datatype
-*toolshed.gz* - as the name suggests, it's an archive ready to upload to a
-Galaxy ToolShed as a new tool repository.
-
-Once it's in a ToolShed, it can be installed into any local Galaxy server
-from the server administrative interface.
-
-Once the new tool is installed, local users can run it - each time, the script
-that was supplied when it was built will be executed with the input chosen
-from the user's history. In other words, the tools you generate with the
-ToolFactory run just like any other Galaxy tool,but run your script every time.
-
-Tool factory tools are perfect for workflow components. One input, one output,
-no variables.
-
-*To fully and safely exploit the awesome power* of this tool,
-Galaxy and the ToolShed, you should be a developer installing this
-tool on a private/personal/scratch local instance where you are an
-admin_user. Then, if you break it, you get to keep all the pieces see
-https://bitbucket.org/fubar/galaxytoolfactory/wiki/Home
-
-**Installation**
-This is a Galaxy tool. You can install it most conveniently using the
-administrative "Search and browse tool sheds" link. Find the Galaxy Main
-toolshed at https://toolshed.g2.bx.psu.edu/ and search for the toolfactory
-repository. Open it and review the code and select the option to install it.
-
-If you can't get the tool that way, the xml and py files here need to be
-copied into a new tools
-subdirectory such as tools/toolfactory Your tool_conf.xml needs a new entry
-pointing to the xml
-file - something like::
-
-
-
-If not already there,
-please add:
-
-to your local data_types_conf.xml.
-
-
-**Restricted execution**
-
-The tool factory tool itself will then be usable ONLY by admin users -
-people with IDs in admin_users in universe_wsgi.ini **Yes, that's right. ONLY
-admin_users can run this tool** Think about it for a moment. If allowed to
-run any arbitrary script on your Galaxy server, the only thing that would
-impede a miscreant bent on destroying all your Galaxy data would probably
-be lack of appropriate technical skills.
-
-**What it does**
-
-This is a tool factory for simple scripts in python, R and
-perl currently. Functional tests are automatically generated. How cool is that.
-
-LIMITED to simple scripts that read one input from the history. Optionally can
-write one new history dataset, and optionally collect any number of outputs
-into links on an autogenerated HTML index page for the user to navigate -
-useful if the script writes images and output files - pdf outputs are shown
-as thumbnails and R's bloated pdf's are shrunk with ghostscript so that and
-imagemagik need to be available.
-
-Generated tools can be edited and enhanced like any Galaxy tool, so start
-small and build up since a generated script gets you a serious leg up to a
-more complex one.
-
-**What you do**
-
-You paste and run your script, you fix the syntax errors and
-eventually it runs. You can use the redo button and edit the script before
-trying to rerun it as you debug - it works pretty well.
-
-Once the script works on some test data, you can generate a toolshed compatible
-gzip file containing your script ready to run as an ordinary Galaxy tool in
-a repository on your local toolshed. That means safe and largely automated
-installation in any production Galaxy configured to use your toolshed.
-
-**Generated tool Security**
-
-Once you install a generated tool, it's just
-another tool - assuming the script is safe. They just run normally and their
-user cannot do anything unusually insecure but please, practice safe toolshed.
-Read the code before you install any tool. Especially this one - it is really scary.
-
-**Send Code**
-
-Patches and suggestions welcome as bitbucket issues please?
-
-**Attribution**
-
-Creating re-usable tools from scripts: The Galaxy Tool Factory
-Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
-Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
-
-http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
-
-**Licensing**
-
-Copyright Ross Lazarus 2010
-ross lazarus at g mail period com
-
-All rights reserved.
-
-Licensed under the LGPL
-
-**Obligatory screenshot**
-
-http://bitbucket.org/fubar/galaxytoolmaker/src/fda8032fe989/images/dynamicScriptTool.png
-
diff -r b938475235e3 -r e7e9732ebed6 test-data/output2_sample
--- a/test-data/output2_sample Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,165 +0,0 @@
-*trats uoy erofeb GNINRAW*
-
-YLNO yxalaG etavirp a no loot siht llatsnI
-ecnatsni noitcudorp ro cilbup a no REVEN esaelP
-
-troppus noitatic gnidda notlihC nhoJ yb 4102 tsugua detadpU
-
-keeB ned nav suiraM yb detroper sgub xif ot 4102 8 tsugua detadpU
-
-ta ecruoser eht etic esaelP
-fer=epytyek&JWYdwMrWs1hQzcl=yekji?375stb/tnirper/igc/gro.slanruojdrofxo.scitamrofnioib//:ptth
-.krow dehsilbup ruoy ni loot siht esu uoy fi
-
-**yrotS trohS**
-
-.sloot yxalaG wen gnitareneg fo elbapac loot yxalaG lausunu na si sihT
-gnitpircs suoregnad ylemertxe erofereht dna *detcirtsernu* gnisopxe yb skrow tI
-ot meht gniwolla ,revres yxalaG tsoh eht fo srotartsinimda detangised lla ot
-,stes atad tupni detceles elpitlum revo lrep dna hs ,nohtyp ,R ni stpircs nur
-.tuptuo sa tes atad wen elgnis a gnitirw
-
-*sretemarap elyts esrapgra ro lanoitisop htiw elbatucexe yna ro tpircs hsab/lrep/nohtyp/r gnikrow a evah uoY*
-
-.loot yxalaG a gnisu ,setunim ni loot yxalaG yranidro na otni denrut eb nac tI
-
-
-**yxalaG yna otni noitallatsni rof sloot yxalaG wen fo noitareneg detamotuA**
-
-.ylppus uoy sgnittes retemarap dna stupni atad tset elpmas llams gnisu detareneg si tset A
-a dliub ot desu eb nac yeht ,decudorp neeb evah stuptuo esac tset eht ecnO
-tnemeriuqer a sa dekab si elbatucexe ro tpircs deilppus ehT .loot yxalaG wen
-.xob eht fo tuo elbitapmoc wolfkrow ylluf ,loot yxalaG yranidro ,wen a otni
-rotartsinimda na yb dehs loot a aiv dellatsni era sloot detareneG
-.sresu ruoy rof sloot yxalaG rehto lla ekil yltcaxe krow dna
-
-**liateD eroM**
-
-a otni etsap ot tpircs a deraperp evah dluohs uoy ,yrotcaFlooT eht esu oT
-yrotsih ruoy morf tceles ot ydaer elpmaxe tupni tset llams a dna dnim ni egakcap a evah ro ,xob txet
-.tpircs wen ruoy tset ot
-
-em rof skrow ```atad-tset/yrotcafloot/srekam_loot/sloot/yxalag/~ atad_tset-- yxalag/~ toor_yxalag-- lmx.2yrotcaFlooTgr tset omenalp```
-
-uoY .mrof yrotcaF looT eht no egaugnal gnitpircs hcae ni elpmaxe na si erehT
-thgir eht tceles ot rebmemer - tuo ti yrt ot eseht etsap dna tuc tsuj nac
-gnisu tes atad tset llams a etaerc ot deen osla ll'uoY .esaelp reterpretni
-.loot atad wen dda yrotsih yxalaG eht
-
-ni tuptuo loot eht no nottub "oder" eht esu ,wohemos sliaf tpircs eht fI
-gub eht xiF .tpircs nekorb htiw etelpmoc mrof eht etaercer ot yrotsih ruoy
-.taeper ,hsaw ,esniR .niaga etucexe dna
-
-tpircs ruoy snur taht loot yxalaG wen a ,yllufssecus snur tpircs eht ecnO
-dna txet pleh emos ylppus dna noitpo "etareneg" eht tceleS .detareneg eb nac
-epytatad yxalaG wen a fo mrof eht ni detareneg eb lliw loot wen ehT .seman
-a ot daolpu ot ydaer evihcra na s'ti ,stseggus eman eht sa - *zg.dehsloot*
-.yrotisoper loot wen a sa dehSlooT yxalaG
-
-revres yxalaG lacol yna otni dellatsni eb nac ti ,dehSlooT a ni s'ti ecnO
-.ecafretni evitartsinimda revres eht morf
-
-tpircs eht ,emit hcae - ti nur nac sresu lacol ,dellatsni si loot wen eht ecnO
-nesohc tupni eht htiw detucexe eb lliw tliub saw ti nehw deilppus saw taht
-eht htiw etareneg uoy sloot eht ,sdrow rehto nI .yrotsih s'resu eht morf
-.emit yreve tpircs ruoy nur tub,loot yxalaG rehto yna ekil tsuj nur yrotcaFlooT
-
-,tuptuo eno ,tupni enO .stnenopmoc wolfkrow rof tcefrep era sloot yrotcaf looT
-.selbairav on
-
-,loot siht fo *rewop emosewa eht tiolpxe ylefas dna ylluf oT*
-siht gnillatsni repoleved a eb dluohs uoy ,dehSlooT eht dna yxalaG
-na era uoy erehw ecnatsni lacol hctarcs/lanosrep/etavirp a no loot
-ees seceip eht lla peek ot teg uoy ,ti kaerb uoy fi ,nehT .resu_nimda
-emoH/ikiw/yrotcaflootyxalag/rabuf/gro.tekcubtib//:sptth
-
-**noitallatsnI**
-eht gnisu yltneinevnoc tsom ti llatsni nac uoY .loot yxalaG a si sihT
-niaM yxalaG eht dniF .knil "sdehs loot esworb dna hcraeS" evitartsinimda
-yrotcafloot eht rof hcraes dna /ude.usp.xb.2g.dehsloot//:sptth ta dehsloot
-.ti llatsni ot noitpo eht tceles dna edoc eht weiver dna ti nepO .yrotisoper
-
-eb ot deen ereh selif yp dna lmx eht ,yaw taht loot eht teg t'nac uoy fI
-sloot wen a otni deipoc
-yrtne wen a sdeen lmx.fnoc_loot ruoY yrotcafloot/sloot sa hcus yrotceridbus
-lmx eht ot gnitniop
-::ekil gnihtemos - elif
-
->"sredliubloot"=di "sloot gnidliub looT"=eman noitces<
->/"lmx.yrotcaFlooTgr/yrotcafloot"=elif loot<
->noitces/<
-
-,ereht ydaerla ton fI
-:dda esaelp
-"yraniB:yranib.sepytatad.yxalag"=epyt "zg.dehsloot"=noisnetxe epytatad<
->/ "eurT"=ssalcbus "pizg-x/trapitlum"=epytemim
-.lmx.fnoc_sepyt_atad lacol ruoy ot
-
-
-**noitucexe detcirtseR**
-
-- sresu nimda yb YLNO elbasu eb neht lliw flesti loot yrotcaf loot ehT
-YLNO .thgir s'taht ,seY** ini.igsw_esrevinu ni sresu_nimda ni sDI htiw elpoep
-ot dewolla fI .tnemom a rof ti tuoba knihT **loot siht nur nac sresu_nimda
-dluow taht gniht ylno eht ,revres yxalaG ruoy no tpircs yrartibra yna nur
-ylbaborp dluow atad yxalaG ruoy lla gniyortsed no tneb tnaercsim a edepmi
-.slliks lacinhcet etairporppa fo kcal eb
-
-**seod ti tahW**
-
-dna R ,nohtyp ni stpircs elpmis rof yrotcaf loot a si sihT
-.taht si looc woH .detareneg yllacitamotua era stset lanoitcnuF .yltnerruc lrep
-
-nac yllanoitpO .yrotsih eht morf tupni eno daer taht stpircs elpmis ot DETIMIL
-stuptuo fo rebmun yna tcelloc yllanoitpo dna ,tesatad yrotsih wen eno etirw
-- etagivan ot resu eht rof egap xedni LMTH detarenegotua na no sknil otni
-nwohs era stuptuo fdp - selif tuptuo dna segami setirw tpircs eht fi lufesu
-dna taht os tpircstsohg htiw knurhs era s'fdp detaolb s'R dna slianbmuht sa
-.elbaliava eb ot deen kigamegami
-
-trats os ,loot yxalaG yna ekil decnahne dna detide eb nac sloot detareneG
-a ot pu gel suoires a uoy steg tpircs detareneg a ecnis pu dliub dna llams
-.eno xelpmoc erom
-
-**od uoy tahW**
-
-dna srorre xatnys eht xif uoy ,tpircs ruoy nur dna etsap uoY
-erofeb tpircs eht tide dna nottub oder eht esu nac uoY .snur ti yllautneve
-.llew ytterp skrow ti - gubed uoy sa ti nurer ot gniyrt
-
-elbitapmoc dehsloot a etareneg nac uoy ,atad tset emos no skrow tpircs eht ecnO
-ni loot yxalaG yranidro na sa nur ot ydaer tpircs ruoy gniniatnoc elif pizg
-detamotua ylegral dna efas snaem tahT .dehsloot lacol ruoy no yrotisoper a
-.dehsloot ruoy esu ot derugifnoc yxalaG noitcudorp yna ni noitallatsni
-
-**ytiruceS loot detareneG**
-
-tsuj s'ti ,loot detareneg a llatsni uoy ecnO
-rieht dna yllamron nur tsuj yehT .efas si tpircs eht gnimussa - loot rehtona
-.dehsloot efas ecitcarp ,esaelp tub erucesni yllausunu gnihtyna od tonnac resu
-.yracs yllaer si ti - eno siht yllaicepsE .loot yna llatsni uoy erofeb edoc eht daeR
-
-**edoC dneS**
-
-?esaelp seussi tekcubtib sa emoclew snoitseggus dna sehctaP
-
-**noitubirttA**
-
-yrotcaF looT yxalaG ehT :stpircs morf sloot elbasu-er gnitaerC
-maeT yxalaG ehT ;nnameiZ kraM ;ipsaK ynotnA ;surazaL ssoR
-375stb/scitamrofnioib/3901.01 :iod ;2102 scitamrofnioiB
-
-fer=epytyek&JWYdwMrWs1hQzcl=yekji?375stb/tnirper/igc/gro.slanruojdrofxo.scitamrofnioib//:ptth
-
-**gnisneciL**
-
-0102 surazaL ssoR thgirypoC
-moc doirep liam g ta surazal ssor
-
-.devreser sthgir llA
-
-LPGL eht rednu desneciL
-
-**tohsneercs yrotagilbO**
-
-gnp.looTtpircScimanyd/segami/989ef2308adf/crs/rekamlootyxalag/rabuf/gro.tekcubtib//:ptth
diff -r b938475235e3 -r e7e9732ebed6 test-data/pyrevpos.python
--- a/test-data/pyrevpos.python Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-# reverse order of text by row
-import sys
-inp = sys.argv[1]
-outp = sys.argv[2]
-i = open(inp,'r').readlines()
-o = open(outp,'w')
-for row in i:
- rs = row.rstrip()
- rs = list(rs)
- rs.reverse()
- o.write(''.join(rs))
-o.close()
-
diff -r b938475235e3 -r e7e9732ebed6 test-data/test1_log.txt
--- a/test-data/test1_log.txt Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-## Executing Toolfactory generated command line = python /tmp/pyrevposq5dmcdy1.python /tmp/tmpqrksf8sd/files/5/b/9/dataset_5b952a86-87df-44ad-a415-ea549f3f0cee.dat output2
diff -r b938475235e3 -r e7e9732ebed6 test-data/toolfactory_pyrevpos_tgz_sample
Binary file test-data/toolfactory_pyrevpos_tgz_sample has changed
diff -r b938475235e3 -r e7e9732ebed6 testtf.sh
--- a/testtf.sh Sun Aug 16 08:33:09 2020 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,2 +0,0 @@
-planemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/toolfactory &>foo
-
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/.github/workflows/commit.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.github/workflows/commit.yml Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,68 @@
+name: Galaxy Tool Linting and Tests for PR
+# run planemo on a git repository containing a single tool
+# as a github action. Does NOT run flake8. So, bite me.
+# ross lazarus august 2020
+on: [pull_request,push]
+env:
+ GALAXY_REPO: https://github.com/galaxyproject/galaxy
+ GALAXY_RELEASE: release_20.05
+jobs:
+ setup:
+ name: setup environment and python
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: [3.7]
+ steps:
+ - name: Print github context properties
+ run: |
+ echo 'event: ${{ github.event_name }}'
+ echo 'sha: ${{ github.sha }}'
+ echo 'ref: ${{ github.ref }}'
+ echo 'head_ref: ${{ github.head_ref }}'
+ echo 'base_ref: ${{ github.base_ref }}'
+ echo 'event.before: ${{ github.event.before }}'
+ echo 'event.after: ${{ github.event.after }}'
+ - uses: actions/setup-python@v1
+ with:
+ python-version: ${{ matrix.python-version }}
+ - uses: actions/checkout@v2
+ with:
+ # planemo does not seem to want to install the requirement galaxyxml
+ # into the venv it manages at tool testing so do it the old skool way
+ repository: 'galaxyproject/galaxy'
+ path: 'galaxy'
+ - name: make venv ready for this galaxy and planemo
+ run: |
+ python3 -m venv $GITHUB_WORKSPACE/galaxy/.venv
+ . $GITHUB_WORKSPACE/galaxy/.venv/bin/activate
+ pip install --upgrade pip
+ pip install wheel
+ pip install -r $GITHUB_WORKSPACE/galaxy/requirements.txt
+ # pip install galaxyxml # currently includes a patched working version awaiting PR merge
+ - name: Upgrade pip
+ run: pip install --upgrade pip
+ # Install the `wheel` package so that when installing other packages which
+ # are not available as wheels, pip will build a wheel for them, which can be cached.
+ - name: Install wheel
+ run: pip install wheel
+ - name: Install Planemo and flake8
+ run: pip install planemo flake8 flake8-import-order
+ # galaxyxml temporarily removed until PR accepted
+ - uses: actions/checkout@v2
+ with:
+ fetch-depth: 1
+ - name: flake8
+ run: flake8 --ignore=E2,E3,E4,E5,W3,W505
+ - name: Planemo lint
+ run: planemo lint .
+ - name: Planemo test tool
+ run: planemo test --galaxy_root $GITHUB_WORKSPACE/galaxy --test_output tool_test_output.html --skip_venv --test_output_json tool_test_output.json --galaxy_python_version ${{ matrix.python-version }} .
+ - name: Copy artifacts into place
+ run: |
+ mkdir upload
+ mv tool_test_output.json tool_test_output.html upload/
+ - uses: actions/upload-artifact@v2.0.1
+ with:
+ name: 'All tool test results'
+ path: upload
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/.gitignore
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.gitignore Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,129 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/.shed.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/.shed.yml Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,13 @@
+name: toolfactory
+owner: fubar
+description: ToolFactory - tool to make Galaxy tools ready for the toolshed
+homepage_url: https://github.com/fubar2/toolfactory
+long_description: |
+ ToolFactory - turn executable packages and R/python/perl/bash scripts into ordinary Galaxy tools
+
+ Creating re-usable tools from scripts: The Galaxy Tool Factory Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
+ Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
+remote_repository_url: https://github.com/fubar2/toolfactory
+type: tool_dependency_definition
+categories:
+- Tool Generators
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/LICENSE
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/LICENSE Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,504 @@
+GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+(This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.)
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+ {description}
+ Copyright (C) {year} {fullname}
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
+ USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random
+ Hacker.
+
+ {signature of Ty Coon}, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/README.md
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/README.md Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,165 @@
+Note as at August 8 2020
+
+
+*WARNING before you start*
+
+ Install this tool on a private Galaxy ONLY
+ Please NEVER on a public or production instance
+
+Please cite the resource at
+http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
+if you use this tool in your published work.
+
+**Short Story**
+
+This is an unusual Galaxy tool capable of generating new Galaxy tools.
+It works by exposing *unrestricted* and therefore extremely dangerous scripting
+to all designated administrators of the host Galaxy server, allowing them to
+run scripts in R, python, sh and perl over multiple selected input data sets,
+writing a single new data set as output.
+
+*You have a working r/python/perl/bash script or any executable with positional or argparse style parameters*
+
+It can be turned into an ordinary Galaxy tool in minutes, using a Galaxy tool.
+
+**Automated generation of new Galaxy tools for installation into any Galaxy**
+
+A test is generated using small sample test data inputs and parameter settings you supply.
+Once the test case outputs have been produced, they can be used to build a
+new Galaxy tool. The supplied script or executable is baked as a requirement
+into a new, ordinary Galaxy tool, fully workflow compatible out of the box.
+Generated tools are installed via a tool shed by an administrator
+and work exactly like all other Galaxy tools for your users.
+
+**More Detail**
+
+To use the ToolFactory, you should have prepared a script to paste into a
+text box, or have a package in mind and a small test input example ready to select from your history
+to test your new script.
+
+```planemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/rossgit/toolfactory``` works for me
+
+There is an example in each scripting language on the Tool Factory form. You
+can just cut and paste these to try it out - remember to select the right
+interpreter please. You'll also need to create a small test data set using
+the Galaxy history add new data tool.
+
+If the script fails somehow, use the "redo" button on the tool output in
+your history to recreate the form complete with broken script. Fix the bug
+and execute again. Rinse, wash, repeat.
+
+Once the script runs successfully, a new Galaxy tool that runs your script
+can be generated. Select the "generate" option and supply some help text and
+names. The new tool will be generated in the form of a new Galaxy datatype
+*tgz* - as the name suggests, it's an archive ready to upload to a
+Galaxy ToolShed as a new tool repository.
+
+
+Once it's in a ToolShed, it can be installed into any local Galaxy server
+from the server administrative interface.
+
+Once the new tool is installed, local users can run it - each time, the script
+that was supplied when it was built will be executed with the input chosen
+from the user's history. In other words, the tools you generate with the
+ToolFactory run just like any other Galaxy tool, but run your script every time.
+
+Tool factory tools are perfect for workflow components. One input, one output,
+no variables.
+
+*To fully and safely exploit the awesome power* of this tool,
+Galaxy and the ToolShed, you should be a developer installing this
+tool on a private/personal/scratch local instance where you are an
+admin_user. Then, if you break it, you get to keep all the pieces. See
+https://bitbucket.org/fubar/galaxytoolfactory/wiki/Home
+
+**Installation**
+This is a Galaxy tool. You can install it most conveniently using the
+administrative "Search and browse tool sheds" link. Find the Galaxy Main
+toolshed at https://toolshed.g2.bx.psu.edu/ and search for the toolfactory
+repository. Open it and review the code and select the option to install it.
+
+If you can't get the tool that way, the xml and py files here need to be
+copied into a new tools
+subdirectory such as tools/toolfactory Your tool_conf.xml needs a new entry
+pointing to the xml
+file - something like::
+
+
+
+If not already there,
+please add:
+
+to your local data_types_conf.xml.
+
+
+**Restricted execution**
+
+The tool factory tool itself will then be usable ONLY by admin users -
+people with IDs in admin_users in universe_wsgi.ini **Yes, that's right. ONLY
+admin_users can run this tool** Think about it for a moment. If allowed to
+run any arbitrary script on your Galaxy server, the only thing that would
+impede a miscreant bent on destroying all your Galaxy data would probably
+be lack of appropriate technical skills.
+
+**What it does**
+
+This is a tool factory for simple scripts in python, R and
+perl currently. Functional tests are automatically generated.
+
+LIMITED to simple scripts that read one input from the history. Optionally can
+write one new history dataset, and optionally collect any number of outputs
+into links on an autogenerated HTML index page for the user to navigate -
+useful if the script writes images and output files - pdf outputs are shown
+as thumbnails and R's bloated pdf's are shrunk with ghostscript, so ghostscript
+and imagemagick need to be available.
+
+Generated tools can be edited and enhanced like any Galaxy tool, so start
+small and build up since a generated script gets you a serious leg up to a
+more complex one.
+
+**What you do**
+
+You paste and run your script, you fix the syntax errors and
+eventually it runs. You can use the redo button and edit the script before
+trying to rerun it as you debug - it works pretty well.
+
+Once the script works on some test data, you can generate a toolshed compatible
+gzip file containing your script ready to run as an ordinary Galaxy tool in
+a repository on your local toolshed. That means safe and largely automated
+installation in any production Galaxy configured to use your toolshed.
+
+**Generated tool Security**
+
+Once you install a generated tool, it's just
+another tool - assuming the script is safe. They just run normally and their
+user cannot do anything unusually insecure but please, practice safe toolshed.
+Read the code before you install any tool. Especially this one - it is really scary.
+
+**Send Code**
+
+Patches and suggestions welcome as bitbucket issues please?
+
+**Attribution**
+
+Creating re-usable tools from scripts: The Galaxy Tool Factory
+Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
+Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
+
+http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
+
+**Licensing**
+
+Copyright Ross Lazarus 2010
+ross lazarus at g mail period com
+
+All rights reserved.
+
+Licensed under the LGPL
+
+**Obligatory screenshot**
+
+http://bitbucket.org/fubar/galaxytoolmaker/src/fda8032fe989/images/dynamicScriptTool.png
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/TF_example_wf.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/TF_example_wf.ga Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,458 @@
+{
+ "a_galaxy_workflow": "true",
+ "annotation": "",
+ "format-version": "0.1",
+ "name": "TF examples",
+ "steps": {
+ "0": {
+ "annotation": "a fasta file - gc ratio will be estimated",
+ "content_id": null,
+ "errors": null,
+ "id": 0,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "a fasta file - gc ratio will be estimated",
+ "name": "phiX.fasta"
+ }
+ ],
+ "label": "phiX.fasta",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 231,
+ "height": 61,
+ "left": 393,
+ "right": 593,
+ "top": 170,
+ "width": 200,
+ "x": 393,
+ "y": 170
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "7e2e24c8-2327-4893-a5b3-6b696a6ecd33",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "f5ce2386-b80c-4691-a0a7-abeec8854461"
+ }
+ ]
+ },
+ "1": {
+ "annotation": "Any text file to be reversed lots of times and bjorked once.",
+ "content_id": null,
+ "errors": null,
+ "id": 1,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "Any text file to be reversed lots of times and bjorked once.",
+ "name": "rgToolFactory2.py"
+ }
+ ],
+ "label": "rgToolFactory2.py",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 371,
+ "height": 81,
+ "left": 393,
+ "right": 593,
+ "top": 290,
+ "width": 200,
+ "x": 393,
+ "y": 290
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "f530b390-2424-4aae-8bd9-dd9d30277561",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "3d2a1699-50af-46e1-8981-dc6c3de2cf6c"
+ }
+ ]
+ },
+ "2": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 2,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 0,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 372,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 170,
+ "width": 200,
+ "x": 613,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"perl\", \"__current_case__\": 4, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"\\n# usage : perl toolExample.pl \\n\\nopen (IN, \\\"<$ARGV[0]\\\");\\nopen (OUT, \\\">$ARGV[1]\\\");\\nwhile () {\\n chop;\\n if (m/^>/) {\\n s/^>//;\\n if ($. > 1) {\\n print OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\n }\\n $gc = 0;\\n $length = 0;\\n } else {\\n ++$gc while m/[gc]/ig;\\n $length += length $_;\\n }\\n}\\nprint OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\nclose( IN );\\nclose( OUT );\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"perl version of gc counter from planemo example\", \"help_text\": \"**What it Does**\\ncounts gc using, ugh, perl...\\n\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"fasta\"], \"input_label\": \"input fasta file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"tsv\", \"history_CL\": \"2\"}], \"edit_params\": \"no\", \"additional_parameters\": []}, \"tool_name\": \"perlgc\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "3b6aab01-4759-4df6-801f-626678639e51",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "7aea56bd-4f39-4d3b-8254-a6675161d059"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "f964e779-2f92-4c81-9819-3e1ebc156664"
+ }
+ ]
+ },
+ "3": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 3,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 492,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 290,
+ "width": 200,
+ "x": 613,
+ "y": 290
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"system\", \"__current_case__\": 1, \"exe_package\": \"sed\", \"exe_package_version\": \"\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"sed runner\", \"help_text\": \"sed '/old/new/g input.txt\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input text\", \"input_help\": \"parameter_help\", \"input_CL\": \"3\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"STDOUT\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"sedstring\", \"param_type\": \"text\", \"param_value\": \"s/def/bjork!bjorkdef/g\", \"param_label\": \"parameter_label\", \"param_help\": \"parameter_help\", \"param_CL\": \"1\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"sedtest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "2138c717-5128-4c4b-bc22-4809cd001c34",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b0be8c95-7380-42b8-a16d-8e08578d4dd7"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "56635519-a9a0-49eb-8305-59cc1fcef99f"
+ }
+ ]
+ },
+ "4": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 4,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 652,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 410,
+ "width": 200,
+ "x": 613,
+ "y": 410
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\nappendme = sys.argv[3]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write(appendme)\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"appendme\", \"param_type\": \"text\", \"param_value\": \"added at the end\", \"param_label\": \"append string\", \"param_help\": \"parameter_help\", \"param_CL\": \"3\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"pyrevaddpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "30dbe033-30c4-4228-b0cb-854df30f5594",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "37fdd905-471d-4479-a98a-4dfbaa6314be"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "7c8a8dba-1e8c-49d5-b51d-a0ab09931932"
+ }
+ ]
+ },
+ "5": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 5,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 772,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 530,
+ "width": 200,
+ "x": 613,
+ "y": 530
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\nimport argparse\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--infile',default='')\\na('--outfile',default=None)\\nargs = parser.parse_args()\\ninp = args.infile\\noutp = args.outfile\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"reverse argparse\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"infile\", \"input_help\": \"parameter_help\", \"input_CL\": \"infile\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"outfile\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevargparse\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "91a0dccf-384c-491a-ae08-f426888d26cf",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "61a5271d-3940-4855-9093-a0710dc3fe08"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "a6602e23-dc1c-44b7-8ed7-cd9971ff9d30"
+ }
+ ]
+ },
+ "6": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 6,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 852,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 650,
+ "width": 200,
+ "x": 613,
+ "y": 650
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"bash\", \"__current_case__\": 5, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"rev | tac\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"tacrev\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"0\", \"__current_case__\": 2, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"2\"}]}, \"tool_name\": \"tacrev\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "edb5d852-908b-45bf-8892-e0e8c337c31d",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "c1394cf9-bb03-4ac3-8466-8ee0cc30c0a0"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e45566f4-d40e-4ad0-ad27-72ce814b13da"
+ }
+ ]
+ },
+ "7": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 7,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 992,
+ "height": 222,
+ "left": 613,
+ "right": 813,
+ "top": 770,
+ "width": 200,
+ "x": 613,
+ "y": 770
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "08a48555-8700-4652-a76b-df1f54197049",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b5bd73bb-1ddc-4161-be2e-370bab9aebbe"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e96ae086-a92a-4018-8f07-ebf4974807e6"
+ }
+ ]
+ },
+ "8": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 8,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 7,
+ "output_name": "new_tool"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 412,
+ "height": 242,
+ "left": 833,
+ "right": 1033,
+ "top": 170,
+ "width": 200,
+ "x": 833,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"import argparse\\nimport tarfile\\nimport os\\nimport tempfile\\nimport subprocess\\n\\n\\\"\\\"\\\"\\nplanemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/pyrevargparse/ &> pyrevargparse\\n\\\"\\\"\\\"\\n\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--tooltgz',default='')\\na('--report',default=None)\\na('--toolout',default=None)\\na('--galaxy_root',default=None)\\nargs = parser.parse_args()\\ntoolname = args.toolout.split(os.sep)[-1]\\ntoolpath = os.path.join(args.galaxy_root,args.toolout)\\ntf = tarfile.open(args.tooltgz,\\\"r:gz\\\")\\ntf.extractall(toolpath)\\ncl = \\\"planemo test --skip_venv --galaxy_root %s %s\\\" % (args.galaxy_root,toolpath)\\ncll = cl.split(' ')\\nsto = open(args.report, 'w')\\np = subprocess.run(cll, shell=False, stdout=sto)\\nretval = p.returncode\\nsto.close()\\n\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"Tool to test toolshed tool archives generated by the tool factory.\", \"help_text\": \"**What it Does**\\n\\nGiven a toolshed tgz file generated by a tool factory run, this will unpack it and run planemo test, returning the planemo stdout as a report\\nIt was generated using the tool factory.\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"tgz\"], \"input_label\": \"tool toolshed tgz archive from history\", \"input_help\": \"Run planemo test on a tool shed tool archive tgz format file generated by the ToolFactory or Planemo\", \"input_CL\": \"tooltgz\"}], 
\"history_outputs\": [{\"__index__\": 0, \"history_name\": \"report\", \"history_format\": \"txt\", \"history_CL\": \"report\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"toolout\", \"param_type\": \"text\", \"param_value\": \"tools/toolmakers/planemotest\", \"param_label\": \"output path under galaxy root\", \"param_help\": \"This is where the tgz file will be extracted and tested by planemo\", \"param_CL\": \"toolout\", \"param_CLprefixed\": \"\"}, {\"__index__\": 1, \"param_name\": \"galaxy_root\", \"param_type\": \"text\", \"param_value\": \"/home/ross/galaxy\", \"param_label\": \"Galaxy source root directory to use for running planemo\", \"param_help\": \"This will form the galaxy_root parameter for rnning planemo using an existing Galaxy source tree, and the tgz will be extracted at a path relative to that rootu\", \"param_CL\": \"galaxy_root\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"planemotest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "b9bfb1a4-4c0c-4d39-9e74-223da72f8abc",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "09ba44ea-4da8-46f5-a411-ca054ccedd3b"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "50a8ff4a-702a-4983-8202-8a79c0a3c978"
+ }
+ ]
+ }
+ },
+ "tags": [],
+ "uuid": "321a7f9f-c287-453c-807a-43afd948770e",
+ "version": 1
+}
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/Dockerfile
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/Dockerfile Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,46 @@
+# Galaxy - Toolshed Docker image
+# Derived from bgruening stable galaxy
+# VERSION 0.2
+# patches startup so the below will install a workflow dropped into $GALAXY_ROOT/workflows
+# use docker run -p 8080:80 -d -e GALAXY_AUTO_UPDATE_WORKFLOWS=/galaxy-central/workflows/TF_sample_wf.ga -v /home/ubuntu/galaxy_storage/:/export/ toolfactory
+FROM bgruening/galaxy-stable
+MAINTAINER Ross Lazarus ross.lazarus@gmail.com
+
+ENV GALAXY_CONFIG_BRAND=ToolFactory
+ENV GALAXY_LOGGING="full"
+
+# RUN apt-get update -y && apt-get install -y build-essential gcc apt-utils
+# needed for planemo build
+
+RUN /galaxy_venv/bin/python -m pip install --upgrade pip
+# RUN /galaxy_venv/bin/python -m pip install planemo
+
+# RUN add-tool-shed --url 'http://testtoolshed.g2.bx.psu.edu/' --name 'Test Tool Shed'
+ADD my_tool_list.yml $GALAXY_ROOT/config/toolfactory_tools.yaml
+ENV GALAXY_AUTO_UPDATE_TOOLS=$GALAXY_ROOT/config/toolfactory_tools.yaml
+
+# Add workflows to the Docker image
+RUN mkdir -p $GALAXY_ROOT/workflows
+ADD TF_example_wf.ga $GALAXY_ROOT/workflows/TF_example_wf.ga
+ADD post-start-actions.sh /export/post-start-actions.sh
+RUN chmod a+xr /export/post-start-actions.sh \
+ && chmod a+xr $GALAXY_ROOT/workflows/TF_example_wf.ga
+
+# Add Galaxy interactive tours
+# ADD ./rna-workbench-tours/* $GALAXY_ROOT/config/plugins/tours/
+# Add data library definition file
+# ADD library_data.yaml $GALAXY_ROOT/library_data.yaml
+# cleanup dance
+RUN apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && rm -rf ~/.cache/ \
+ && find $GALAXY_ROOT/ -name '*.pyc' -delete | true \
+ && find /usr/lib/ -name '*.pyc' -delete | true \
+ && find /var/log/ -name '*.log' -delete | true \
+ && find $GALAXY_VIRTUAL_ENV -name '*.pyc' -delete | true \
+ && rm -rf /tmp/* /root/.cache/ /var/cache/* $GALAXY_ROOT/client/node_modules/ $GALAXY_VIRTUAL_ENV/src/ /home/galaxy/.cache/ /home/galaxy/.npm
+ENV GALAXY_DEFAULT_ADMIN_USER=''
+VOLUME ["/export/"]
+EXPOSE :80
+EXPOSE :21
+EXPOSE :8800
+CMD ["/usr/bin/startup"]
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/TF_example_wf.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/TF_example_wf.ga Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,458 @@
+{
+ "a_galaxy_workflow": "true",
+ "annotation": "",
+ "format-version": "0.1",
+ "name": "TF examples",
+ "steps": {
+ "0": {
+ "annotation": "a fasta file - gc ratio will be estimated",
+ "content_id": null,
+ "errors": null,
+ "id": 0,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "a fasta file - gc ratio will be estimated",
+ "name": "phiX.fasta"
+ }
+ ],
+ "label": "phiX.fasta",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 231,
+ "height": 61,
+ "left": 393,
+ "right": 593,
+ "top": 170,
+ "width": 200,
+ "x": 393,
+ "y": 170
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "7e2e24c8-2327-4893-a5b3-6b696a6ecd33",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "f5ce2386-b80c-4691-a0a7-abeec8854461"
+ }
+ ]
+ },
+ "1": {
+ "annotation": "Any text file to be reversed lots of times and bjorked once.",
+ "content_id": null,
+ "errors": null,
+ "id": 1,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "Any text file to be reversed lots of times and bjorked once.",
+ "name": "rgToolFactory2.py"
+ }
+ ],
+ "label": "rgToolFactory2.py",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 371,
+ "height": 81,
+ "left": 393,
+ "right": 593,
+ "top": 290,
+ "width": 200,
+ "x": 393,
+ "y": 290
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "f530b390-2424-4aae-8bd9-dd9d30277561",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "3d2a1699-50af-46e1-8981-dc6c3de2cf6c"
+ }
+ ]
+ },
+ "2": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 2,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 0,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 372,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 170,
+ "width": 200,
+ "x": 613,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"perl\", \"__current_case__\": 4, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"\\n# usage : perl toolExample.pl \\n\\nopen (IN, \\\"<$ARGV[0]\\\");\\nopen (OUT, \\\">$ARGV[1]\\\");\\nwhile () {\\n chop;\\n if (m/^>/) {\\n s/^>//;\\n if ($. > 1) {\\n print OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\n }\\n $gc = 0;\\n $length = 0;\\n } else {\\n ++$gc while m/[gc]/ig;\\n $length += length $_;\\n }\\n}\\nprint OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\nclose( IN );\\nclose( OUT );\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"perl version of gc counter from planemo example\", \"help_text\": \"**What it Does**\\ncounts gc using, ugh, perl...\\n\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"fasta\"], \"input_label\": \"input fasta file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"tsv\", \"history_CL\": \"2\"}], \"edit_params\": \"no\", \"additional_parameters\": []}, \"tool_name\": \"perlgc\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "3b6aab01-4759-4df6-801f-626678639e51",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "7aea56bd-4f39-4d3b-8254-a6675161d059"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "f964e779-2f92-4c81-9819-3e1ebc156664"
+ }
+ ]
+ },
+ "3": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 3,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 492,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 290,
+ "width": 200,
+ "x": 613,
+ "y": 290
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"system\", \"__current_case__\": 1, \"exe_package\": \"sed\", \"exe_package_version\": \"\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"sed runner\", \"help_text\": \"sed 's/old/new/g' input.txt\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input text\", \"input_help\": \"parameter_help\", \"input_CL\": \"3\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"STDOUT\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"sedstring\", \"param_type\": \"text\", \"param_value\": \"s/def/bjork!bjorkdef/g\", \"param_label\": \"parameter_label\", \"param_help\": \"parameter_help\", \"param_CL\": \"1\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"sedtest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "2138c717-5128-4c4b-bc22-4809cd001c34",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b0be8c95-7380-42b8-a16d-8e08578d4dd7"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "56635519-a9a0-49eb-8305-59cc1fcef99f"
+ }
+ ]
+ },
+ "4": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 4,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 652,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 410,
+ "width": 200,
+ "x": 613,
+ "y": 410
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\nappendme = sys.argv[3]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write(appendme)\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"appendme\", \"param_type\": \"text\", \"param_value\": \"added at the end\", \"param_label\": \"append string\", \"param_help\": \"parameter_help\", \"param_CL\": \"3\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"pyrevaddpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "30dbe033-30c4-4228-b0cb-854df30f5594",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "37fdd905-471d-4479-a98a-4dfbaa6314be"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "7c8a8dba-1e8c-49d5-b51d-a0ab09931932"
+ }
+ ]
+ },
+ "5": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 5,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 772,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 530,
+ "width": 200,
+ "x": 613,
+ "y": 530
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\nimport argparse\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--infile',default='')\\na('--outfile',default=None)\\nargs = parser.parse_args()\\ninp = args.infile\\noutp = args.outfile\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"reverse argparse\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"infile\", \"input_help\": \"parameter_help\", \"input_CL\": \"infile\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"outfile\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevargparse\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "91a0dccf-384c-491a-ae08-f426888d26cf",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "61a5271d-3940-4855-9093-a0710dc3fe08"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "a6602e23-dc1c-44b7-8ed7-cd9971ff9d30"
+ }
+ ]
+ },
+ "6": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 6,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 852,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 650,
+ "width": 200,
+ "x": 613,
+ "y": 650
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"bash\", \"__current_case__\": 5, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"rev | tac\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"tacrev\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"0\", \"__current_case__\": 2, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"2\"}]}, \"tool_name\": \"tacrev\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "edb5d852-908b-45bf-8892-e0e8c337c31d",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "c1394cf9-bb03-4ac3-8466-8ee0cc30c0a0"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e45566f4-d40e-4ad0-ad27-72ce814b13da"
+ }
+ ]
+ },
+ "7": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 7,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 992,
+ "height": 222,
+ "left": 613,
+ "right": 813,
+ "top": 770,
+ "width": 200,
+ "x": 613,
+ "y": 770
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "08a48555-8700-4652-a76b-df1f54197049",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b5bd73bb-1ddc-4161-be2e-370bab9aebbe"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e96ae086-a92a-4018-8f07-ebf4974807e6"
+ }
+ ]
+ },
+ "8": {
+ "annotation": "",
+ "content_id": "rgTF2",
+ "errors": null,
+ "id": 8,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 7,
+ "output_name": "new_tool"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 412,
+ "height": 242,
+ "left": 833,
+ "right": 1033,
+ "top": 170,
+ "width": 200,
+ "x": 833,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "rgTF2",
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"import argparse\\nimport tarfile\\nimport os\\nimport tempfile\\nimport subprocess\\n\\n\\\"\\\"\\\"\\nplanemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/pyrevargparse/ &> pyrevargparse\\n\\\"\\\"\\\"\\n\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--tooltgz',default='')\\na('--report',default=None)\\na('--toolout',default=None)\\na('--galaxy_root',default=None)\\nargs = parser.parse_args()\\ntoolname = args.toolout.split(os.sep)[-1]\\ntoolpath = os.path.join(args.galaxy_root,args.toolout)\\ntf = tarfile.open(args.tooltgz,\\\"r:gz\\\")\\ntf.extractall(toolpath)\\ncl = \\\"planemo test --skip_venv --galaxy_root %s %s\\\" % (args.galaxy_root,toolpath)\\ncll = cl.split(' ')\\nsto = open(args.report, 'w')\\np = subprocess.run(cll, shell=False, stdout=sto)\\nretval = p.returncode\\nsto.close()\\n\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"Tool to test toolshed tool archives generated by the tool factory.\", \"help_text\": \"**What it Does**\\n\\nGiven a toolshed tgz file generated by a tool factory run, this will unpack it and run planemo test, returning the planemo stdout as a report\\nIt was generated using the tool factory.\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"tgz\"], \"input_label\": \"tool toolshed tgz archive from history\", \"input_help\": \"Run planemo test on a tool shed tool archive tgz format file generated by the ToolFactory or Planemo\", \"input_CL\": \"tooltgz\"}], 
\"history_outputs\": [{\"__index__\": 0, \"history_name\": \"report\", \"history_format\": \"txt\", \"history_CL\": \"report\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"toolout\", \"param_type\": \"text\", \"param_value\": \"tools/toolmakers/planemotest\", \"param_label\": \"output path under galaxy root\", \"param_help\": \"This is where the tgz file will be extracted and tested by planemo\", \"param_CL\": \"toolout\", \"param_CLprefixed\": \"\"}, {\"__index__\": 1, \"param_name\": \"galaxy_root\", \"param_type\": \"text\", \"param_value\": \"/home/ross/galaxy\", \"param_label\": \"Galaxy source root directory to use for running planemo\", \"param_help\": \"This will form the galaxy_root parameter for running planemo using an existing Galaxy source tree, and the tgz will be extracted at a path relative to that root\", \"param_CL\": \"galaxy_root\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"planemotest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "b9bfb1a4-4c0c-4d39-9e74-223da72f8abc",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "09ba44ea-4da8-46f5-a411-ca054ccedd3b"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "50a8ff4a-702a-4983-8202-8a79c0a3c978"
+ }
+ ]
+ }
+ },
+ "tags": [],
+ "uuid": "321a7f9f-c287-453c-807a-43afd948770e",
+ "version": 1
+}
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/TF_example_wf2.ga
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/TF_example_wf2.ga Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,500 @@
+{
+ "a_galaxy_workflow": "true",
+ "annotation": "",
+ "format-version": "0.1",
+ "name": "TF examples (imported from uploaded file)",
+ "steps": {
+ "0": {
+ "annotation": "a fasta file - gc ratio will be estimated",
+ "content_id": null,
+ "errors": null,
+ "id": 0,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "a fasta file - gc ratio will be estimated",
+ "name": "phiX.fasta"
+ }
+ ],
+ "label": "phiX.fasta",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 231,
+ "height": 61,
+ "left": 393,
+ "right": 593,
+ "top": 170,
+ "width": 200,
+ "x": 393,
+ "y": 170
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "7e2e24c8-2327-4893-a5b3-6b696a6ecd33",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "f5ce2386-b80c-4691-a0a7-abeec8854461"
+ }
+ ]
+ },
+ "1": {
+ "annotation": "Any text file to be reversed lots of times and bjorked once.",
+ "content_id": null,
+ "errors": null,
+ "id": 1,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "Any text file to be reversed lots of times and bjorked once.",
+ "name": "rgToolFactory2.py"
+ }
+ ],
+ "label": "rgToolFactory2.py",
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "bottom": 371,
+ "height": 81,
+ "left": 393,
+ "right": 593,
+ "top": 290,
+ "width": 200,
+ "x": 393,
+ "y": 290
+ },
+ "tool_id": null,
+ "tool_state": "{\"optional\": false}",
+ "tool_version": null,
+ "type": "data_input",
+ "uuid": "f530b390-2424-4aae-8bd9-dd9d30277561",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "output",
+ "uuid": "3d2a1699-50af-46e1-8981-dc6c3de2cf6c"
+ }
+ ]
+ },
+ "2": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 2,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 0,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 372,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 170,
+ "width": 200,
+ "x": 613,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"perl\", \"__current_case__\": 4, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"\\n# usage : perl toolExample.pl \\n\\nopen (IN, \\\"<$ARGV[0]\\\");\\nopen (OUT, \\\">$ARGV[1]\\\");\\nwhile () {\\n chop;\\n if (m/^>/) {\\n s/^>//;\\n if ($. > 1) {\\n print OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\n }\\n $gc = 0;\\n $length = 0;\\n } else {\\n ++$gc while m/[gc]/ig;\\n $length += length $_;\\n }\\n}\\nprint OUT sprintf(\\\"%.3f\\\", $gc/$length) . \\\"\\\\n\\\";\\nclose( IN );\\nclose( OUT );\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"perl version of gc counter from planemo example\", \"help_text\": \"**What it Does**\\ncounts gc using, ugh, perl...\\n\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"fasta\"], \"input_label\": \"input fasta file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"tsv\", \"history_CL\": \"2\"}], \"edit_params\": \"no\", \"additional_parameters\": []}, \"tool_name\": \"perlgc\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "3b6aab01-4759-4df6-801f-626678639e51",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "f964e779-2f92-4c81-9819-3e1ebc156664"
+ },
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "7aea56bd-4f39-4d3b-8254-a6675161d059"
+ }
+ ]
+ },
+ "3": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 3,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 492,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 290,
+ "width": 200,
+ "x": 613,
+ "y": 290
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"system\", \"__current_case__\": 1, \"exe_package\": \"sed\", \"exe_package_version\": \"\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"sed runner\", \"help_text\": \"sed 's/old/new/g' input.txt\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input text\", \"input_help\": \"parameter_help\", \"input_CL\": \"3\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"STDOUT\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"sedstring\", \"param_type\": \"text\", \"param_value\": \"s/def/bjork!bjorkdef/g\", \"param_label\": \"parameter_label\", \"param_help\": \"parameter_help\", \"param_CL\": \"1\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"sedtest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "2138c717-5128-4c4b-bc22-4809cd001c34",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b0be8c95-7380-42b8-a16d-8e08578d4dd7"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "56635519-a9a0-49eb-8305-59cc1fcef99f"
+ }
+ ]
+ },
+ "4": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 4,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 652,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 410,
+ "width": 200,
+ "x": 613,
+ "y": 410
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\nappendme = sys.argv[3]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write(appendme)\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"appendme\", \"param_type\": \"text\", \"param_value\": \"added at the end\", \"param_label\": \"append string\", \"param_help\": \"parameter_help\", \"param_CL\": \"3\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"pyrevaddpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "30dbe033-30c4-4228-b0cb-854df30f5594",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "37fdd905-471d-4479-a98a-4dfbaa6314be"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "7c8a8dba-1e8c-49d5-b51d-a0ab09931932"
+ }
+ ]
+ },
+ "5": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 5,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 772,
+ "height": 242,
+ "left": 613,
+ "right": 813,
+ "top": 530,
+ "width": 200,
+ "x": 613,
+ "y": 530
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\nimport argparse\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--infile',default='')\\na('--outfile',default=None)\\nargs = parser.parse_args()\\ninp = args.infile\\noutp = args.outfile\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"reverse argparse\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"infile\", \"input_help\": \"parameter_help\", \"input_CL\": \"infile\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"outfile\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevargparse\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "91a0dccf-384c-491a-ae08-f426888d26cf",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "61a5271d-3940-4855-9093-a0710dc3fe08"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "a6602e23-dc1c-44b7-8ed7-cd9971ff9d30"
+ }
+ ]
+ },
+ "6": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 6,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 852,
+ "height": 202,
+ "left": 613,
+ "right": 813,
+ "top": 650,
+ "width": 200,
+ "x": 613,
+ "y": 650
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"bash\", \"__current_case__\": 5, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"rev | tac\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"tacrev\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"0\", \"__current_case__\": 2, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"input file\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"outfile\", \"history_format\": \"txt\", \"history_CL\": \"2\"}]}, \"tool_name\": \"tacrev\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "edb5d852-908b-45bf-8892-e0e8c337c31d",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "c1394cf9-bb03-4ac3-8466-8ee0cc30c0a0"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e45566f4-d40e-4ad0-ad27-72ce814b13da"
+ }
+ ]
+ },
+ "7": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 7,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 1,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 992,
+ "height": 222,
+ "left": 613,
+ "right": 813,
+ "top": 770,
+ "width": 200,
+ "x": 613,
+ "y": 770
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"# reverse order of text by row\\nimport sys\\ninp = sys.argv[1]\\noutp = sys.argv[2]\\ni = open(inp,'r').readlines()\\no = open(outp,'w')\\nfor row in i:\\n rs = row.rstrip()\\n rs = list(rs)\\n rs.reverse()\\n o.write(''.join(rs))\\n o.write('\\\\n')\\no.close()\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"pyrevpos\", \"help_text\": \"**What it Does**\", \"citations\": []}, \"ppass\": {\"parampass\": \"positional\", \"__current_case__\": 1, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"txt\"], \"input_label\": \"inputfile\", \"input_help\": \"parameter_help\", \"input_CL\": \"1\"}], \"history_outputs\": [{\"__index__\": 0, \"history_name\": \"output\", \"history_format\": \"txt\", \"history_CL\": \"2\"}], \"edit_params\": \"yes\", \"additional_parameters\": []}, \"tool_name\": \"pyrevpos\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "08a48555-8700-4652-a76b-df1f54197049",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "e96ae086-a92a-4018-8f07-ebf4974807e6"
+ },
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "b5bd73bb-1ddc-4161-be2e-370bab9aebbe"
+ }
+ ]
+ },
+ "8": {
+ "annotation": "",
+ "content_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "errors": null,
+ "id": 8,
+ "input_connections": {
+ "ppass|history_inputs_0|input_files": {
+ "id": 7,
+ "output_name": "new_tool"
+ }
+ },
+ "inputs": [],
+ "label": null,
+ "name": "toolfactory",
+ "outputs": [
+ {
+ "name": "TF_run_report",
+ "type": "input"
+ },
+ {
+ "name": "new_tool",
+ "type": "tgz"
+ }
+ ],
+ "position": {
+ "bottom": 412,
+ "height": 242,
+ "left": 833,
+ "right": 1033,
+ "top": 170,
+ "width": 200,
+ "x": 833,
+ "y": 170
+ },
+ "post_job_actions": {},
+ "tool_id": "toolshed.g2.bx.psu.edu/repos/fubar/tool_factory_2/rgTF2/2.00",
+ "tool_shed_repository": {
+ "changeset_revision": "51fa77152988",
+ "name": "tool_factory_2",
+ "owner": "fubar",
+ "tool_shed": "toolshed.g2.bx.psu.edu"
+ },
+ "tool_state": "{\"__input_ext\": \"input\", \"chromInfo\": \"/home/ross/galaxy/tool-data/shared/ucsc/chrom/?.len\", \"interexe\": {\"interpreter\": \"python\", \"__current_case__\": 2, \"interpreter_version\": \"\", \"exe_package_version\": \"\", \"dynScript\": \"import argparse\\nimport tarfile\\nimport os\\nimport tempfile\\nimport subprocess\\n\\n\\\"\\\"\\\"\\nplanemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/pyrevargparse/ &> pyrevargparse\\n\\\"\\\"\\\"\\n\\nparser = argparse.ArgumentParser()\\na = parser.add_argument\\na('--tooltgz',default='')\\na('--report',default=None)\\na('--toolout',default=None)\\na('--galaxy_root',default=None)\\nargs = parser.parse_args()\\ntoolname = args.toolout.split(os.sep)[-1]\\ntoolpath = os.path.join(args.galaxy_root,args.toolout)\\ntf = tarfile.open(args.tooltgz,\\\"r:gz\\\")\\ntf.extractall(toolpath)\\ncl = \\\"planemo test --skip_venv --galaxy_root %s %s\\\" % (args.galaxy_root,toolpath)\\ncll = cl.split(' ')\\nsto = open(args.report, 'w')\\np = subprocess.run(cll, shell=False, stdout=sto)\\nretval = p.returncode\\nsto.close()\\n\"}, \"makeMode\": {\"make_Tool\": \"yes\", \"__current_case__\": 0, \"tool_version\": \"0.01\", \"tool_desc\": \"Tool to test toolshed tool archives generated by the tool factory.\", \"help_text\": \"**What it Does**\\n\\nGiven a toolshed tgz file generated by a tool factory run, this will unpack it and run planemo test, returning the planemo stdout as a report\\nIt was generated using the tool factory.\", \"citations\": []}, \"ppass\": {\"parampass\": \"argparse\", \"__current_case__\": 0, \"history_inputs\": [{\"__index__\": 0, \"input_files\": {\"__class__\": \"ConnectedValue\"}, \"input_formats\": [\"tgz\"], \"input_label\": \"tool toolshed tgz archive from history\", \"input_help\": \"Run planemo test on a tool shed tool archive tgz format file generated by the ToolFactory or Planemo\", \"input_CL\": \"tooltgz\"}], 
\"history_outputs\": [{\"__index__\": 0, \"history_name\": \"report\", \"history_format\": \"txt\", \"history_CL\": \"report\"}], \"edit_params\": \"yes\", \"additional_parameters\": [{\"__index__\": 0, \"param_name\": \"toolout\", \"param_type\": \"text\", \"param_value\": \"tools/toolmakers/planemotest\", \"param_label\": \"output path under galaxy root\", \"param_help\": \"This is where the tgz file will be extracted and tested by planemo\", \"param_CL\": \"toolout\", \"param_CLprefixed\": \"\"}, {\"__index__\": 1, \"param_name\": \"galaxy_root\", \"param_type\": \"text\", \"param_value\": \"/home/ross/galaxy\", \"param_label\": \"Galaxy source root directory to use for running planemo\", \"param_help\": \"This will form the galaxy_root parameter for rnning planemo using an existing Galaxy source tree, and the tgz will be extracted at a path relative to that rootu\", \"param_CL\": \"galaxy_root\", \"param_CLprefixed\": \"\"}]}, \"tool_name\": \"planemotest\", \"__page__\": null, \"__rerun_remap_job_id__\": null}",
+ "tool_version": "2.00",
+ "type": "tool",
+ "uuid": "b9bfb1a4-4c0c-4d39-9e74-223da72f8abc",
+ "workflow_outputs": [
+ {
+ "label": null,
+ "output_name": "TF_run_report",
+ "uuid": "09ba44ea-4da8-46f5-a411-ca054ccedd3b"
+ },
+ {
+ "label": null,
+ "output_name": "new_tool",
+ "uuid": "50a8ff4a-702a-4983-8202-8a79c0a3c978"
+ }
+ ]
+ }
+ },
+ "tags": [],
+ "uuid": "321a7f9f-c287-453c-807a-43afd948770e",
+ "version": 0
+}
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/dockerfile.seq
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/dockerfile.seq Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,25 @@
+# Galaxy - Toolshed docker
+
+FROM quay.io/bgruening/galaxy:19.01
+
+MAINTAINER Björn A. Grüning, bjoern.gruening@gmail.com
+
+ENV GALAXY_CONFIG_BRAND ToolFactory
+ENV GALAXY_CONFIG_SANITIZE_ALL_HTML false
+
+# Install tools
+#ADD data_managers.yaml $GALAXY_ROOT/data_managers.yaml
+#RUN install-tools $GALAXY_ROOT/data_managers.yaml && \
+# /tool_deps/_conda/bin/conda clean --tarballs && \
+# rm /export/galaxy-central/ -rf
+ADD my_tool_list.yml $GALAXY_ROOT/tools1.yaml
+RUN install-tools $GALAXY_ROOT/tools1.yaml && \
+ /tool_deps/_conda/bin/conda clean --tarballs && \
+ rm /export/galaxy-central/ -rf
+
+ADD TF_example_wf.ga $GALAXY_HOME/workflows/TF_example_wf.ga
+
+ADD post-start-actions.sh /export/post-start-actions.sh
+RUN chmod a+x /export/post-start-actions.sh
+
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/my_tool_list.yml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/my_tool_list.yml Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,9 @@
+install_resolver_dependencies: true
+install_tool_dependencies: false
+tools:
+- name: tool_factory_2
+ owner: fubar
+ tool_panel_section_label: 'Make new Tools'
+ tool_shed_url: https://toolshed.g2.bx.psu.edu
+
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/post-start-actions.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/post-start-actions.sh Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,5 @@
+#!/bin/bash
+# hook to install tf demo workflow
+echo "#### post start actions.sh hook happening"
+chown $GALAXY_USER $GALAXY_ROOT/workflows/TF_example_wf.ga
+workflow-install -w $GALAXY_ROOT/workflows/TF_example_wf.ga -g http://localhost -a fakekey --publish_workflows
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/startgaldock.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startgaldock.sh Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,1 @@
+docker run -d -p 8080:80 -v /home/ubuntu/galaxy_storage/:/export/ toolfactory
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/startup
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startup Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,460 @@
+#!/usr/bin/env bash
+
+# Migration path for old images that had the tool_deps under /export/galaxy-central/tool_deps/
+
+if [ -d "/export/galaxy-central/tool_deps/" ] && [ ! -L "/export/galaxy-central/tool_deps/" ]; then
+ mkdir -p /export/tool_deps/
+ mv /export/galaxy-central/tool_deps /export/
+ ln -s /export/tool_deps/ $GALAXY_ROOT/
+fi
+
+# This is needed for Docker compose to have a unified alias for the main container.
+# Modifying /etc/hosts can only happen during runtime not during build-time
+echo "127.0.0.1 galaxy" >> /etc/hosts
+
+# Set number of Galaxy handlers via GALAXY_HANDLER_NUMPROCS or default to 2
+ansible localhost -m ini_file -a "dest=/etc/supervisor/conf.d/galaxy.conf section=program:handler option=numprocs value=${GALAXY_HANDLER_NUMPROCS:-2}" &> /dev/null
+
+# If the Galaxy config file is not in the expected place, copy from the sample
+# and hope for the best (that the admin has done all the setup through env vars.)
+if [ ! -f $GALAXY_CONFIG_FILE ]
+ then
+ # this should successfully copy either .yml or .ini sample file to the expected location
+ cp /export/config/galaxy${GALAXY_CONFIG_FILE: -4}.sample $GALAXY_CONFIG_FILE
+fi
+
+# Configure proxy prefix filtering
+if [[ ! -z $PROXY_PREFIX ]]
+ then
+ if [ ${GALAXY_CONFIG_FILE: -4} == ".ini" ]
+ then
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}" &> /dev/null
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=app:main option=filter-with value=proxy-prefix" &> /dev/null
+ else
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ module:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ socket:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ mount:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ manage-script-name:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' manage-script-name: true'" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' mount: ${PROXY_PREFIX}=galaxy.webapps.galaxy.buildapp:uwsgi_app()'" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' socket: unix:///srv/galaxy/var/uwsgi.sock'" &> /dev/null
+
+ # Also set SCRIPT_NAME. It's not always necessary due to manage-script-name: true in galaxy.yml, but it makes life easier in this container + it does no harm
+ ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf regexp='^ uwsgi_param SCRIPT_NAME' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf insertafter='^ include uwsgi_params' line=' uwsgi_param SCRIPT_NAME ${PROXY_PREFIX};'" &> /dev/null
+ fi
+
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}/reports" &> /dev/null
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=app:main option=filter-with value=proxy-prefix" &> /dev/null
+
+ # Fix path to html assets
+ ansible localhost -m replace -a "dest=$GALAXY_CONFIG_DIR/web/welcome.html regexp='(href=\"|\')[/\\w]*(/static)' replace='\\1${PROXY_PREFIX}\\2'" &> /dev/null
+
+ # Set some other vars based on that prefix
+ if [ "x$GALAXY_CONFIG_COOKIE_PATH" == "x" ]
+ then
+ export GALAXY_CONFIG_COOKIE_PATH="$PROXY_PREFIX"
+ fi
+ if [ "x$GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX" == "x" ]
+ then
+ export GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX="$PROXY_PREFIX/gie_proxy"
+ fi
+
+ # Change the defaults nginx upload/x-accel paths
+ if [ "$GALAXY_CONFIG_NGINX_UPLOAD_PATH" == "/_upload" ]
+ then
+ export GALAXY_CONFIG_NGINX_UPLOAD_PATH="${PROXY_PREFIX}${GALAXY_CONFIG_NGINX_UPLOAD_PATH}"
+ fi
+fi
+
+# Disable authentication of Galaxy reports
+if [[ ! -z $DISABLE_REPORTS_AUTH ]]
+ then
+ # disable authentication
+ echo "Disable Galaxy reports authentication "
+ echo "" > /etc/nginx/conf.d/reports_auth.conf
+ else
+ # enable authentication
+ echo "Enable Galaxy reports authentication "
+ cp /etc/nginx/conf.d/reports_auth.conf.source /etc/nginx/conf.d/reports_auth.conf
+fi
+
+# Try to guess if we are running under --privileged mode
+if [[ ! -z $HOST_DOCKER_LEGACY ]]; then
+ if mount | grep "/proc/kcore"; then
+ PRIVILEGED=false
+ else
+ PRIVILEGED=true
+ fi
+else
+ # Taken from http://stackoverflow.com/questions/32144575/how-to-know-if-a-docker-container-is-running-in-privileged-mode
+ ip link add dummy0 type dummy 2>/dev/null
+ if [[ $? -eq 0 ]]; then
+ PRIVILEGED=true
+ # clean the dummy0 link
+ ip link delete dummy0 2>/dev/null
+ else
+ PRIVILEGED=false
+ fi
+fi
+
+cd $GALAXY_ROOT
+. $GALAXY_VIRTUAL_ENV/bin/activate
+
+if $PRIVILEGED; then
+ umount /var/lib/docker
+fi
+
+if [[ ! -z $STARTUP_EXPORT_USER_FILES ]]; then
+ # If /export/ is mounted, export_user_files file moving all data to /export/
+ # symlinks will point from the original location to the new path under /export/
+ # If /export/ is not given, nothing will happen in that step
+ echo "Checking /export..."
+ python3 /usr/local/bin/export_user_files.py $PG_DATA_DIR_DEFAULT
+fi
+
+# Delete compiled templates in case they are out of date
+if [[ ! -z $GALAXY_CONFIG_TEMPLATE_CACHE_PATH ]]; then
+ rm -rf $GALAXY_CONFIG_TEMPLATE_CACHE_PATH/*
+fi
+
+# Enable loading of dependencies on startup. Such as LDAP.
+# Adapted from galaxyproject/galaxy/scripts/common_startup.sh
+if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]]
+ then
+ echo "Installing optional dependencies in galaxy virtual environment..."
+ : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
+ GALAXY_CONDITIONAL_DEPENDENCIES=$(PYTHONPATH=lib python -c "import galaxy.dependencies; print('\n'.join(galaxy.dependencies.optional('$GALAXY_CONFIG_FILE')))")
+ [ -z "$GALAXY_CONDITIONAL_DEPENDENCIES" ] || echo "$GALAXY_CONDITIONAL_DEPENDENCIES" | pip install -q -r /dev/stdin --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
+
+if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]] && [[ ! -z $LOAD_PYTHON_DEV_DEPENDENCIES ]]
+ then
+ echo "Installing development requirements in galaxy virtual environment..."
+ : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
+ dev_requirements='./lib/galaxy/dependencies/dev-requirements.txt'
+ [ -f $dev_requirements ] && pip install -q -r $dev_requirements --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
+
+# Enable Test Tool Shed
+if [[ ! -z $ENABLE_TTS_INSTALL ]]
+ then
+ echo "Enable installation from the Test Tool Shed."
+ export GALAXY_CONFIG_TOOL_SHEDS_CONFIG_FILE=$GALAXY_HOME/tool_sheds_conf.xml
+fi
+
+# Remove all default tools from Galaxy by default
+if [[ ! -z $BARE ]]
+ then
+ echo "Remove all tools from the tool_conf.xml file."
+ export GALAXY_CONFIG_TOOL_CONFIG_FILE=config/shed_tool_conf.xml,$GALAXY_ROOT/test/functional/tools/upload_tool_conf.xml
+fi
+
+# If auto installing conda envs, make sure bcftools is installed for __set_metadata__ tool
+if [[ ! -z $GALAXY_CONFIG_CONDA_AUTO_INSTALL ]]
+ then
+ if [ ! -d "/tool_deps/_conda/envs/__bcftools@1.5" ]; then
+ su $GALAXY_USER -c "/tool_deps/_conda/bin/conda create -y --override-channels --channel iuc --channel conda-forge --channel bioconda --channel defaults --name __bcftools@1.5 bcftools=1.5"
+ su $GALAXY_USER -c "/tool_deps/_conda/bin/conda clean --tarballs --yes"
+ fi
+fi
+
+if [[ ! -z $GALAXY_EXTRAS_CONFIG_POSTGRES ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ # Backward compatibility for exported postgresql directories before version 15.08.
+ # In previous versions postgres has the UID/GID of 102/106. We changed this in
+ # https://github.com/bgruening/docker-galaxy-stable/pull/71 to GALAXY_POSTGRES_UID=1550 and
+ # GALAXY_POSTGRES_GID=1550
+ if [ -e /export/postgresql/ ];
+ then
+ if [ `stat -c %g /export/postgresql/` == "106" ];
+ then
+ chown -R postgres:postgres /export/postgresql/
+ fi
+ fi
+ fi
+fi
+
+
+if [[ ! -z $GALAXY_EXTRAS_CONFIG_CONDOR ]]; then
+ if [[ ! -z $ENABLE_CONDOR ]]
+ then
+ if [[ ! -z $CONDOR_HOST ]]
+ then
+ echo "Enabling Condor with external scheduler at $CONDOR_HOST"
+ echo "# Config generated by startup.sh
+CONDOR_HOST = $CONDOR_HOST
+ALLOW_ADMINISTRATOR = *
+ALLOW_OWNER = *
+ALLOW_READ = *
+ALLOW_WRITE = *
+ALLOW_CLIENT = *
+ALLOW_NEGOTIATOR = *
+DAEMON_LIST = MASTER, SCHEDD
+UID_DOMAIN = galaxy
+DISCARD_SESSION_KEYRING_ON_STARTUP = False
+TRUST_UID_DOMAIN = true" > /etc/condor/condor_config.local
+ fi
+
+ if [[ -e /export/condor_config ]]
+ then
+ echo "Replacing Condor config by locally supplied config from /export/condor_config"
+ rm -f /etc/condor/condor_config
+ ln -s /export/condor_config /etc/condor/condor_config
+ fi
+ fi
+fi
+
+
+# Copy or link the slurm/munge config files
+if [ -e /export/slurm.conf ]
+then
+ rm -f /etc/slurm-llnl/slurm.conf
+ ln -s /export/slurm.conf /etc/slurm-llnl/slurm.conf
+else
+ # Configure SLURM with runtime hostname.
+ # Use absolute path to python so virtualenv is not used.
+ /usr/bin/python /usr/sbin/configure_slurm.py
+fi
+if [ -e /export/munge.key ]
+then
+ rm -f /etc/munge/munge.key
+ ln -s /export/munge.key /etc/munge/munge.key
+ chmod 400 /export/munge.key
+fi
+
+# link the gridengine config file
+if [ -e /export/act_qmaster ]
+then
+ rm -f /var/lib/gridengine/default/common/act_qmaster
+ ln -s /export/act_qmaster /var/lib/gridengine/default/common/act_qmaster
+fi
+
+# Waits until postgres is ready
+function wait_for_postgres {
+ echo "Checking if database is up and running"
+ until /usr/local/bin/check_database.py 2>&1 >/dev/null; do sleep 1; echo "Waiting for database"; done
+ echo "Database connected"
+}
+
+# $NONUSE can be set to include cron, proftp, reports or nodejs
+# if included we will _not_ start these services.
+function start_supervisor {
+ supervisord -c /etc/supervisor/supervisord.conf
+ sleep 5
+
+ if [[ ! -z $SUPERVISOR_MANAGE_POSTGRES && ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ echo "Starting postgres"
+ supervisorctl start postgresql
+ fi
+ fi
+
+ wait_for_postgres
+
+ # Make sure the database is automatically updated
+ if [[ ! -z $GALAXY_AUTO_UPDATE_DB ]]
+ then
+ echo "Updating Galaxy database"
+ sh manage_db.sh -c /etc/galaxy/galaxy.yml upgrade
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_CRON ]]; then
+ if [[ $NONUSE != *"cron"* ]]
+ then
+ echo "Starting cron"
+ supervisorctl start cron
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_PROFTP ]]; then
+ if [[ $NONUSE != *"proftp"* ]]
+ then
+ echo "Starting ProFTP"
+ supervisorctl start proftpd
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_REPORTS ]]; then
+ if [[ $NONUSE != *"reports"* ]]
+ then
+ echo "Starting Galaxy reports webapp"
+ supervisorctl start reports
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_IE_PROXY ]]; then
+ if [[ $NONUSE != *"nodejs"* ]]
+ then
+ echo "Starting nodejs"
+ supervisorctl start galaxy:galaxy_nodejs_proxy
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_CONDOR ]]; then
+ if [[ $NONUSE != *"condor"* ]]
+ then
+ echo "Starting condor"
+ supervisorctl start condor
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_SLURM ]]; then
+ if [[ $NONUSE != *"slurmctld"* ]]
+ then
+ echo "Starting slurmctld"
+ supervisorctl start slurmctld
+ fi
+ if [[ $NONUSE != *"slurmd"* ]]
+ then
+ echo "Starting slurmd"
+ supervisorctl start slurmd
+ fi
+ supervisorctl start munge
+ else
+ if [[ $NONUSE != *"slurmctld"* ]]
+ then
+ echo "Starting slurmctld"
+ /usr/sbin/slurmctld -L $GALAXY_LOGS_DIR/slurmctld.log
+ fi
+ if [[ $NONUSE != *"slurmd"* ]]
+ then
+ echo "Starting slurmd"
+ /usr/sbin/slurmd -L $GALAXY_LOGS_DIR/slurmd.log
+ fi
+
+ # We need to run munged regardless
+ mkdir -p /var/run/munge && /usr/sbin/munged -f
+ fi
+}
+
+if [[ ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ # Change the data_directory of postgresql in the main config file
+ ansible localhost -m lineinfile -a "line='data_directory = \'$PG_DATA_DIR_HOST\'' dest=$PG_CONF_DIR_DEFAULT/postgresql.conf backup=yes state=present regexp='data_directory'" &> /dev/null
+ fi
+fi
+
+if $PRIVILEGED; then
+ echo "Enable Galaxy Interactive Environments."
+ export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY="config/plugins/interactive_environments"
+ if [ x$DOCKER_PARENT == "x" ]; then
+ #build the docker in docker environment
+ bash /root/cgroupfs_mount.sh
+ start_supervisor
+ supervisorctl start docker
+ else
+ #inheriting /var/run/docker.sock from parent, assume that you need to
+ #run docker with sudo to validate
+ echo "$GALAXY_USER ALL = NOPASSWD : ALL" >> /etc/sudoers
+ start_supervisor
+ fi
+ if [[ ! -z $PULL_IE_IMAGES ]]; then
+ echo "About to pull IE images. Depending on the size, this may take a while!"
+
+ for ie in {JUPYTER,RSTUDIO,ETHERCALC,PHINCH,NEO}; do
+ enabled_var_name="GALAXY_EXTRAS_IE_FETCH_${ie}";
+ if [[ ${!enabled_var_name} ]]; then
+ # Store name in a var
+ image_var_name="GALAXY_EXTRAS_${ie}_IMAGE"
+ # And then read from that var
+ docker pull "${!image_var_name}"
+ fi
+ done
+ fi
+
+ # in privileged mode autofs and CVMFS is available
+ # install autofs
+ echo "Installing autofs to enable automatic CVMFS mounts"
+ apt-get install autofs --no-install-recommends -y
+ apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/*
+else
+ echo "Disable Galaxy Interactive Environments. Start with --privileged to enable IE's."
+ export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY=""
+ start_supervisor
+fi
+
+if [ "$USE_HTTPS_LETSENCRYPT" != "False" ]
+then
+ echo "Setting up letsencrypt"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=letsencrypt \
+ --extra-vars galaxy_extras_galaxy_domain="GALAXY_CONFIG_GALAXY_INFRASTRUCTURE_URL" \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+fi
+if [ "$USE_HTTPS" != "False" ]
+then
+ if [ -f /export/server.key -a -f /export/server.crt ]
+ then
+ echo "Copying SSL keys"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=own \
+ --extra-vars src_nginx_ssl_certificate_key=/export/server.key \
+ --extra-vars src_nginx_ssl_certificate=/export/server.crt \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+ else
+ echo "Setting up self-signed SSL keys"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=self-signed \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+ fi
+fi
+
+# In case the user wants the default admin to be created, do so.
+if [[ ! -z $GALAXY_DEFAULT_ADMIN_USER ]]
+ then
+ echo "Creating admin user $GALAXY_DEFAULT_ADMIN_USER with key $GALAXY_DEFAULT_ADMIN_KEY and password $GALAXY_DEFAULT_ADMIN_PASSWORD if not existing"
+ python /usr/local/bin/create_galaxy_user.py --user "$GALAXY_DEFAULT_ADMIN_EMAIL" --password "$GALAXY_DEFAULT_ADMIN_PASSWORD" \
+ -c "$GALAXY_CONFIG_FILE" --username "$GALAXY_DEFAULT_ADMIN_USER" --key "$GALAXY_DEFAULT_ADMIN_KEY"
+ # If there is a need to execute actions that would require a live galaxy instance, such as adding workflows, setting quotas, adding more users, etc.
+ # then place a file with that logic named post-start-actions.sh on the /export/ directory, it should have access to all environment variables
+ # visible here.
+ # The file needs to be executable (chmod a+x post-start-actions.sh)
+fi
+if [ -x /export/post-start-actions.sh ]
+ then
+ # uses ephemeris, present in docker-galaxy-stable, to wait for the local instance
+ /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > $GALAXY_LOGS_DIR/post-start-actions.log &&
+ /export/post-start-actions.sh >> $GALAXY_LOGS_DIR/post-start-actions.log &
+fi
+
+
+# Reinstall tools if the user want to
+if [[ ! -z $GALAXY_AUTO_UPDATE_TOOLS ]]
+ then
+ /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > /home/galaxy/logs/post-start-actions.log &&
+ OLDIFS=$IFS
+ IFS=','
+ for TOOL_YML in `echo "$GALAXY_AUTO_UPDATE_TOOLS"`
+ do
+ echo "Installing tools from $TOOL_YML"
+ /tool_deps/_conda/bin/shed-tools install -g "http://127.0.0.1" -a "$GALAXY_DEFAULT_ADMIN_KEY" -t "$TOOL_YML"
+ /tool_deps/_conda/bin/conda clean --tarballs --yes
+ done
+ IFS=$OLDIFS
+fi
+
+# migrate custom IEs or Visualisations (Galaxy plugins)
+# this is needed by the new client build system
+python3 ${GALAXY_ROOT}/scripts/plugin_staging.py
+
+# Enable verbose output
+if [ `echo ${GALAXY_LOGGING:-'no'} | tr [:upper:] [:lower:]` = "full" ]
+ then
+ tail -f /var/log/supervisor/* /var/log/nginx/* $GALAXY_LOGS_DIR/*.log
+ else
+ tail -f $GALAXY_LOGS_DIR/*.log
+fi
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/docker/startup.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/docker/startup.sh Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,462 @@
+#!/usr/bin/env bash
+
+# Migration path for old images that had the tool_deps under /export/galaxy-central/tool_deps/
+
+if [ -d "/export/galaxy-central/tool_deps/" ] && [ ! -L "/export/galaxy-central/tool_deps/" ]; then
+ mkdir -p /export/tool_deps/
+ mv /export/galaxy-central/tool_deps /export/
+ ln -s /export/tool_deps/ $GALAXY_ROOT/
+fi
+
+# This is needed for Docker compose to have a unified alias for the main container.
+# Modifying /etc/hosts can only happen during runtime not during build-time
+echo "127.0.0.1 galaxy" >> /etc/hosts
+
+# Set number of Galaxy handlers via GALAXY_HANDLER_NUMPROCS or default to 2
+ansible localhost -m ini_file -a "dest=/etc/supervisor/conf.d/galaxy.conf section=program:handler option=numprocs value=${GALAXY_HANDLER_NUMPROCS:-2}" &> /dev/null
+
+# If the Galaxy config file is not in the expected place, copy from the sample
+# and hope for the best (that the admin has done all the setup through env vars.)
+if [ ! -f $GALAXY_CONFIG_FILE ]
+ then
+    # this should successfully copy either .yml or .ini sample file to the expected location
+ cp /export/config/galaxy${GALAXY_CONFIG_FILE: -4}.sample $GALAXY_CONFIG_FILE
+fi
+
+# Configure proxy prefix filtering
+if [[ ! -z $PROXY_PREFIX ]]
+ then
+ if [ ${GALAXY_CONFIG_FILE: -4} == ".ini" ]
+ then
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}" &> /dev/null
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_FILE} section=app:main option=filter-with value=proxy-prefix" &> /dev/null
+ else
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ module:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ socket:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ mount:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} regexp='^ manage-script-name:' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' manage-script-name: true'" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' mount: ${PROXY_PREFIX}=galaxy.webapps.galaxy.buildapp:uwsgi_app()'" &> /dev/null
+ ansible localhost -m lineinfile -a "path=${GALAXY_CONFIG_FILE} insertafter='^uwsgi:' line=' socket: unix:///srv/galaxy/var/uwsgi.sock'" &> /dev/null
+
+ # Also set SCRIPT_NAME. It's not always necessary due to manage-script-name: true in galaxy.yml, but it makes life easier in this container + it does no harm
+ ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf regexp='^ uwsgi_param SCRIPT_NAME' state=absent" &> /dev/null
+ ansible localhost -m lineinfile -a "path=/etc/nginx/conf.d/uwsgi.conf insertafter='^ include uwsgi_params' line=' uwsgi_param SCRIPT_NAME ${PROXY_PREFIX};'" &> /dev/null
+ fi
+
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=filter:proxy-prefix option=prefix value=${PROXY_PREFIX}/reports" &> /dev/null
+ ansible localhost -m ini_file -a "dest=${GALAXY_CONFIG_DIR}/reports_wsgi.ini section=app:main option=filter-with value=proxy-prefix" &> /dev/null
+
+ # Fix path to html assets
+ ansible localhost -m replace -a "dest=$GALAXY_CONFIG_DIR/web/welcome.html regexp='(href=\"|\')[/\\w]*(/static)' replace='\\1${PROXY_PREFIX}\\2'" &> /dev/null
+
+ # Set some other vars based on that prefix
+ if [ "x$GALAXY_CONFIG_COOKIE_PATH" == "x" ]
+ then
+ export GALAXY_CONFIG_COOKIE_PATH="$PROXY_PREFIX"
+ fi
+ if [ "x$GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX" == "x" ]
+ then
+ export GALAXY_CONFIG_DYNAMIC_PROXY_PREFIX="$PROXY_PREFIX/gie_proxy"
+ fi
+
+ # Change the defaults nginx upload/x-accel paths
+ if [ "$GALAXY_CONFIG_NGINX_UPLOAD_PATH" == "/_upload" ]
+ then
+ export GALAXY_CONFIG_NGINX_UPLOAD_PATH="${PROXY_PREFIX}${GALAXY_CONFIG_NGINX_UPLOAD_PATH}"
+ fi
+fi
+
+# Disable authentication of Galaxy reports
+if [[ ! -z $DISABLE_REPORTS_AUTH ]]
+ then
+        # disable authentication
+ echo "Disable Galaxy reports authentification "
+ echo "" > /etc/nginx/conf.d/reports_auth.conf
+ else
+        # enable authentication
+ echo "Enable Galaxy reports authentification "
+ cp /etc/nginx/conf.d/reports_auth.conf.source /etc/nginx/conf.d/reports_auth.conf
+fi
+
+# Try to guess if we are running under --privileged mode
+if [[ ! -z $HOST_DOCKER_LEGACY ]]; then
+ if mount | grep "/proc/kcore"; then
+ PRIVILEGED=false
+ else
+ PRIVILEGED=true
+ fi
+else
+ # Taken from http://stackoverflow.com/questions/32144575/how-to-know-if-a-docker-container-is-running-in-privileged-mode
+ ip link add dummy0 type dummy 2>/dev/null
+ if [[ $? -eq 0 ]]; then
+ PRIVILEGED=true
+ # clean the dummy0 link
+ ip link delete dummy0 2>/dev/null
+ else
+ PRIVILEGED=false
+ fi
+fi
+
+cd $GALAXY_ROOT
+. $GALAXY_VIRTUAL_ENV/bin/activate
+
+if $PRIVILEGED; then
+ umount /var/lib/docker
+fi
+
+if [[ ! -z $STARTUP_EXPORT_USER_FILES ]]; then
+    # If /export/ is mounted, the export_user_files script moves all data to /export/
+ # symlinks will point from the original location to the new path under /export/
+ # If /export/ is not given, nothing will happen in that step
+ echo "Checking /export..."
+ python3 /usr/local/bin/export_user_files.py $PG_DATA_DIR_DEFAULT
+fi
+
+# Delete compiled templates in case they are out of date
+if [[ ! -z $GALAXY_CONFIG_TEMPLATE_CACHE_PATH ]]; then
+ rm -rf $GALAXY_CONFIG_TEMPLATE_CACHE_PATH/*
+fi
+
+# Enable loading of dependencies on startup, such as LDAP.
+# Adapted from galaxyproject/galaxy/scripts/common_startup.sh
+if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]]
+ then
+ echo "Installing optional dependencies in galaxy virtual environment..."
+ : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
+ GALAXY_CONDITIONAL_DEPENDENCIES=$(PYTHONPATH=lib python -c "import galaxy.dependencies; print('\n'.join(galaxy.dependencies.optional('$GALAXY_CONFIG_FILE')))")
+ [ -z "$GALAXY_CONDITIONAL_DEPENDENCIES" ] || echo "$GALAXY_CONDITIONAL_DEPENDENCIES" | pip install -q -r /dev/stdin --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
+
+if [[ ! -z $LOAD_GALAXY_CONDITIONAL_DEPENDENCIES ]] && [[ ! -z $LOAD_PYTHON_DEV_DEPENDENCIES ]]
+ then
+ echo "Installing development requirements in galaxy virtual environment..."
+ : ${GALAXY_WHEELS_INDEX_URL:="https://wheels.galaxyproject.org/simple"}
+ dev_requirements='./lib/galaxy/dependencies/dev-requirements.txt'
+ [ -f $dev_requirements ] && pip install -q -r $dev_requirements --index-url "${GALAXY_WHEELS_INDEX_URL}"
+fi
+
+# Enable Test Tool Shed
+if [[ ! -z $ENABLE_TTS_INSTALL ]]
+ then
+ echo "Enable installation from the Test Tool Shed."
+ export GALAXY_CONFIG_TOOL_SHEDS_CONFIG_FILE=$GALAXY_HOME/tool_sheds_conf.xml
+fi
+
+# Remove all default tools from Galaxy by default
+if [[ ! -z $BARE ]]
+ then
+ echo "Remove all tools from the tool_conf.xml file."
+ export GALAXY_CONFIG_TOOL_CONFIG_FILE=config/shed_tool_conf.xml,$GALAXY_ROOT/test/functional/tools/upload_tool_conf.xml
+fi
+
+# If auto installing conda envs, make sure bcftools is installed for __set_metadata__ tool
+if [[ ! -z $GALAXY_CONFIG_CONDA_AUTO_INSTALL ]]
+ then
+ if [ ! -d "/tool_deps/_conda/envs/__bcftools@1.5" ]; then
+ su $GALAXY_USER -c "/tool_deps/_conda/bin/conda create -y --override-channels --channel iuc --channel conda-forge --channel bioconda --channel defaults --name __bcftools@1.5 bcftools=1.5"
+ su $GALAXY_USER -c "/tool_deps/_conda/bin/conda clean --tarballs --yes"
+ fi
+fi
+
+if [[ ! -z $GALAXY_EXTRAS_CONFIG_POSTGRES ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ # Backward compatibility for exported postgresql directories before version 15.08.
+ # In previous versions postgres has the UID/GID of 102/106. We changed this in
+ # https://github.com/bgruening/docker-galaxy-stable/pull/71 to GALAXY_POSTGRES_UID=1550 and
+ # GALAXY_POSTGRES_GID=1550
+ if [ -e /export/postgresql/ ];
+ then
+ if [ `stat -c %g /export/postgresql/` == "106" ];
+ then
+ chown -R postgres:postgres /export/postgresql/
+ fi
+ fi
+ fi
+fi
+
+
+if [[ ! -z $GALAXY_EXTRAS_CONFIG_CONDOR ]]; then
+ if [[ ! -z $ENABLE_CONDOR ]]
+ then
+ if [[ ! -z $CONDOR_HOST ]]
+ then
+ echo "Enabling Condor with external scheduler at $CONDOR_HOST"
+ echo "# Config generated by startup.sh
+CONDOR_HOST = $CONDOR_HOST
+ALLOW_ADMINISTRATOR = *
+ALLOW_OWNER = *
+ALLOW_READ = *
+ALLOW_WRITE = *
+ALLOW_CLIENT = *
+ALLOW_NEGOTIATOR = *
+DAEMON_LIST = MASTER, SCHEDD
+UID_DOMAIN = galaxy
+DISCARD_SESSION_KEYRING_ON_STARTUP = False
+TRUST_UID_DOMAIN = true" > /etc/condor/condor_config.local
+ fi
+
+ if [[ -e /export/condor_config ]]
+ then
+ echo "Replacing Condor config by locally supplied config from /export/condor_config"
+ rm -f /etc/condor/condor_config
+ ln -s /export/condor_config /etc/condor/condor_config
+ fi
+ fi
+fi
+
+
+# Copy or link the slurm/munge config files
+if [ -e /export/slurm.conf ]
+then
+ rm -f /etc/slurm-llnl/slurm.conf
+ ln -s /export/slurm.conf /etc/slurm-llnl/slurm.conf
+else
+ # Configure SLURM with runtime hostname.
+ # Use absolute path to python so virtualenv is not used.
+ /usr/bin/python /usr/sbin/configure_slurm.py
+fi
+if [ -e /export/munge.key ]
+then
+ rm -f /etc/munge/munge.key
+ ln -s /export/munge.key /etc/munge/munge.key
+ chmod 400 /export/munge.key
+fi
+
+# link the gridengine config file
+if [ -e /export/act_qmaster ]
+then
+ rm -f /var/lib/gridengine/default/common/act_qmaster
+ ln -s /export/act_qmaster /var/lib/gridengine/default/common/act_qmaster
+fi
+
+# Waits until postgres is ready
+function wait_for_postgres {
+ echo "Checking if database is up and running"
+ until /usr/local/bin/check_database.py 2>&1 >/dev/null; do sleep 1; echo "Waiting for database"; done
+ echo "Database connected"
+}
+
+# $NONUSE can be set to include cron, proftp, reports or nodejs
+# if included we will _not_ start these services.
+function start_supervisor {
+ supervisord -c /etc/supervisor/supervisord.conf
+ sleep 5
+
+ if [[ ! -z $SUPERVISOR_MANAGE_POSTGRES && ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ echo "Starting postgres"
+ supervisorctl start postgresql
+ fi
+ fi
+
+ wait_for_postgres
+
+ # Make sure the database is automatically updated
+ if [[ ! -z $GALAXY_AUTO_UPDATE_DB ]]
+ then
+ echo "Updating Galaxy database"
+ sh manage_db.sh -c /etc/galaxy/galaxy.yml upgrade
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_CRON ]]; then
+ if [[ $NONUSE != *"cron"* ]]
+ then
+ echo "Starting cron"
+ supervisorctl start cron
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_PROFTP ]]; then
+ if [[ $NONUSE != *"proftp"* ]]
+ then
+ echo "Starting ProFTP"
+ supervisorctl start proftpd
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_REPORTS ]]; then
+ if [[ $NONUSE != *"reports"* ]]
+ then
+ echo "Starting Galaxy reports webapp"
+ supervisorctl start reports
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_IE_PROXY ]]; then
+ if [[ $NONUSE != *"nodejs"* ]]
+ then
+ echo "Starting nodejs"
+ supervisorctl start galaxy:galaxy_nodejs_proxy
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_CONDOR ]]; then
+ if [[ $NONUSE != *"condor"* ]]
+ then
+ echo "Starting condor"
+ supervisorctl start condor
+ fi
+ fi
+
+ if [[ ! -z $SUPERVISOR_MANAGE_SLURM ]]; then
+ if [[ $NONUSE != *"slurmctld"* ]]
+ then
+ echo "Starting slurmctld"
+ supervisorctl start slurmctld
+ fi
+ if [[ $NONUSE != *"slurmd"* ]]
+ then
+ echo "Starting slurmd"
+ supervisorctl start slurmd
+ fi
+ supervisorctl start munge
+ else
+ if [[ $NONUSE != *"slurmctld"* ]]
+ then
+ echo "Starting slurmctld"
+ /usr/sbin/slurmctld -L $GALAXY_LOGS_DIR/slurmctld.log
+ fi
+ if [[ $NONUSE != *"slurmd"* ]]
+ then
+ echo "Starting slurmd"
+ /usr/sbin/slurmd -L $GALAXY_LOGS_DIR/slurmd.log
+ fi
+
+ # We need to run munged regardless
+ mkdir -p /var/run/munge && /usr/sbin/munged -f
+ fi
+}
+
+if [[ ! -z $SUPERVISOR_POSTGRES_AUTOSTART ]]; then
+ if [[ $NONUSE != *"postgres"* ]]
+ then
+ # Change the data_directory of postgresql in the main config file
+ ansible localhost -m lineinfile -a "line='data_directory = \'$PG_DATA_DIR_HOST\'' dest=$PG_CONF_DIR_DEFAULT/postgresql.conf backup=yes state=present regexp='data_directory'" &> /dev/null
+ fi
+fi
+
+if $PRIVILEGED; then
+ echo "Enable Galaxy Interactive Environments."
+ export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY="config/plugins/interactive_environments"
+ if [ x$DOCKER_PARENT == "x" ]; then
+ #build the docker in docker environment
+ bash /root/cgroupfs_mount.sh
+ start_supervisor
+ supervisorctl start docker
+ else
+ #inheriting /var/run/docker.sock from parent, assume that you need to
+ #run docker with sudo to validate
+ echo "$GALAXY_USER ALL = NOPASSWD : ALL" >> /etc/sudoers
+ start_supervisor
+ fi
+ if [[ ! -z $PULL_IE_IMAGES ]]; then
+ echo "About to pull IE images. Depending on the size, this may take a while!"
+
+ for ie in {JUPYTER,RSTUDIO,ETHERCALC,PHINCH,NEO}; do
+ enabled_var_name="GALAXY_EXTRAS_IE_FETCH_${ie}";
+ if [[ ${!enabled_var_name} ]]; then
+ # Store name in a var
+ image_var_name="GALAXY_EXTRAS_${ie}_IMAGE"
+ # And then read from that var
+ docker pull "${!image_var_name}"
+ fi
+ done
+ fi
+
+    # in privileged mode autofs and CVMFS are available
+ # install autofs
+ echo "Installing autofs to enable automatic CVMFS mounts"
+ apt-get install autofs --no-install-recommends -y
+ apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/*
+else
+ echo "Disable Galaxy Interactive Environments. Start with --privileged to enable IE's."
+ export GALAXY_CONFIG_INTERACTIVE_ENVIRONMENT_PLUGINS_DIRECTORY=""
+ start_supervisor
+fi
+
+if [ "$USE_HTTPS_LETSENCRYPT" != "False" ]
+then
+ echo "Settting up letsencrypt"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=letsencrypt \
+ --extra-vars galaxy_extras_galaxy_domain="GALAXY_CONFIG_GALAXY_INFRASTRUCTURE_URL" \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+fi
+if [ "$USE_HTTPS" != "False" ]
+then
+ if [ -f /export/server.key -a -f /export/server.crt ]
+ then
+ echo "Copying SSL keys"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=own \
+ --extra-vars src_nginx_ssl_certificate_key=/export/server.key \
+ --extra-vars src_nginx_ssl_certificate=/export/server.crt \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+ else
+ echo "Setting up self-signed SSL keys"
+ ansible-playbook -c local /ansible/provision.yml \
+ --extra-vars gather_facts=False \
+ --extra-vars galaxy_extras_config_ssl=True \
+ --extra-vars galaxy_extras_config_ssl_method=self-signed \
+ --extra-vars galaxy_extras_config_nginx_upload=False \
+ --tags https
+ fi
+fi
+
+# In case the user wants the default admin to be created, do so.
+if [[ ! -z $GALAXY_DEFAULT_ADMIN_USER ]]
+ then
+ echo "Creating admin user $GALAXY_DEFAULT_ADMIN_USER with key $GALAXY_DEFAULT_ADMIN_KEY and password $GALAXY_DEFAULT_ADMIN_PASSWORD if not existing"
+ python /usr/local/bin/create_galaxy_user.py --user "$GALAXY_DEFAULT_ADMIN_EMAIL" --password "$GALAXY_DEFAULT_ADMIN_PASSWORD" \
+ -c "$GALAXY_CONFIG_FILE" --username "$GALAXY_DEFAULT_ADMIN_USER" --key "$GALAXY_DEFAULT_ADMIN_KEY"
+fi
+# If there is a need to execute actions that would require a live galaxy instance, such as adding workflows, setting quotas, adding more users, etc.
+# then place a file with that logic named post-start-actions.sh in the /export/ directory; it will have access to all environment variables
+# visible here.
+# The file needs to be executable (chmod a+x post-start-actions.sh)
+# uses ephemeris, present in docker-galaxy-stable, to wait for the local instance
+
+if [[ -f /export/post-start-actions.sh ]]
+ then
+ /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > $GALAXY_LOGS_DIR/post-start-actions.log
+ /export/post-start-actions.sh >> $GALAXY_LOGS_DIR/post-start-actions.log &
+ else
+ echo "No /export/post-start-actions.sh found or not executable so not running" >> $GALAXY_LOGS_DIR/post-start-actions.log
+fi
+
+
+# Reinstall tools if the user wants to
+if [[ ! -z $GALAXY_AUTO_UPDATE_TOOLS ]]
+ then
+ /tool_deps/_conda/bin/galaxy-wait -g http://127.0.0.1 -v --timeout 120 > /home/galaxy/logs/post-start-actions.log &&
+ OLDIFS=$IFS
+ IFS=','
+ for TOOL_YML in `echo "$GALAXY_AUTO_UPDATE_TOOLS"`
+ do
+ echo "Installing tools from $TOOL_YML"
+ /tool_deps/_conda/bin/shed-tools install -g "http://127.0.0.1" -a "$GALAXY_DEFAULT_ADMIN_KEY" -t "$TOOL_YML"
+ /tool_deps/_conda/bin/conda clean --tarballs --yes
+ done
+ IFS=$OLDIFS
+fi
+
+# migrate custom IEs or Visualisations (Galaxy plugins)
+# this is needed by the new client build system
+python3 ${GALAXY_ROOT}/scripts/plugin_staging.py
+
+# Enable verbose output
+if [ `echo ${GALAXY_LOGGING:-'no'} | tr [:upper:] [:lower:]` = "full" ]
+ then
+ tail -f /var/log/supervisor/* /var/log/nginx/* $GALAXY_LOGS_DIR/*.log
+ else
+ tail -f $GALAXY_LOGS_DIR/*.log
+fi
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/html_dir.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/html_dir.py Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,180 @@
+
+class tooloutHTMLifyer(self):
+
+ def compressPDF(self,inpdf=None,thumbformat='png'):
+ """need absolute path to pdf
+ note that GS gets confoozled if no $TMP or $TEMP
+ so we set it
+ """
+ assert os.path.isfile(inpdf), "## Input %s supplied to %s compressPDF not found" % (inpdf,self.myName)
+ hlog = os.path.join(self.opts.output_dir,"compress_%s.txt" % os.path.basename(inpdf))
+ sto = open(hlog,'a')
+ our_env = os.environ.copy()
+ our_tmp = our_env.get('TMP',None)
+ if not our_tmp:
+ our_tmp = our_env.get('TEMP',None)
+ if not (our_tmp and os.path.exists(our_tmp)):
+ newtmp = os.path.join(self.opts.output_dir,'tmp')
+ try:
+ os.mkdir(newtmp)
+ except:
+ sto.write('## WARNING - cannot make %s - it may exist or permissions need fixing\n' % newtmp)
+ our_env['TEMP'] = newtmp
+ if not self.temp_warned:
+ sto.write('## WARNING - no $TMP or $TEMP!!! Please fix - using %s temporarily\n' % newtmp)
+ self.temp_warned = True
+ outpdf = '%s_compressed' % inpdf
+ cl = ["gs", "-sDEVICE=pdfwrite", "-dNOPAUSE", "-dUseCIEColor", "-dBATCH","-dPDFSETTINGS=/printer", "-sOutputFile=%s" % outpdf,inpdf]
+ x = subprocess.Popen(cl,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
+ retval1 = x.wait()
+ sto.close()
+ if retval1 == 0:
+ os.unlink(inpdf)
+ shutil.move(outpdf,inpdf)
+ os.unlink(hlog)
+ hlog = os.path.join(self.opts.output_dir,"thumbnail_%s.txt" % os.path.basename(inpdf))
+ sto = open(hlog,'w')
+ outpng = '%s.%s' % (os.path.splitext(inpdf)[0],thumbformat)
+ if self.useGM:
+ cl2 = ['gm', 'convert', inpdf, outpng]
+ else: # assume imagemagick
+ cl2 = ['convert', inpdf, outpng]
+ x = subprocess.Popen(cl2,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
+ retval2 = x.wait()
+ sto.close()
+ if retval2 == 0:
+ os.unlink(hlog)
+ retval = retval1 or retval2
+ return retval
+
+
+ def getfSize(self,fpath,outpath):
+ """
+ format a nice file size string
+ """
+ size = ''
+ fp = os.path.join(outpath,fpath)
+ if os.path.isfile(fp):
+ size = '0 B'
+ n = float(os.path.getsize(fp))
+ if n > 2**20:
+ size = '%1.1f MB' % (n/2**20)
+ elif n > 2**10:
+ size = '%1.1f KB' % (n/2**10)
+ elif n > 0:
+ size = '%d B' % (int(n))
+ return size
+
+ def makeHtml(self):
+ """ Create an HTML file content to list all the artifacts found in the output_dir
+ """
+
+ galhtmlprefix = """
+
+
+
+
+
+
+
+
+ """
+ galhtmlattr = """
"""
+ galhtmlpostfix = """
\n"""
+
+ flist = os.listdir(self.opts.output_dir)
+ flist = [x for x in flist if x != 'Rplots.pdf']
+ flist.sort()
+ html = []
+ html.append(galhtmlprefix % progname)
+ html.append('Galaxy Tool "%s" run at %s
' % (self.toolname,timenow()))
+ fhtml = []
+ if len(flist) > 0:
+ logfiles = [x for x in flist if x.lower().endswith('.log')] # log file names determine sections
+ logfiles.sort()
+ logfiles = [x for x in logfiles if os.path.abspath(x) != os.path.abspath(self.tlog)]
+ logfiles.append(os.path.abspath(self.tlog)) # make it the last one
+ pdflist = []
+ npdf = len([x for x in flist if os.path.splitext(x)[-1].lower() == '.pdf'])
+ for rownum,fname in enumerate(flist):
+ dname,e = os.path.splitext(fname)
+ sfsize = self.getfSize(fname,self.opts.output_dir)
+ if e.lower() == '.pdf' : # compress and make a thumbnail
+ thumb = '%s.%s' % (dname,self.thumbformat)
+ pdff = os.path.join(self.opts.output_dir,fname)
+ retval = self.compressPDF(inpdf=pdff,thumbformat=self.thumbformat)
+ if retval == 0:
+ pdflist.append((fname,thumb))
+ else:
+ pdflist.append((fname,fname))
+ if (rownum+1) % 2 == 0:
+ fhtml.append('%s %s ' % (fname,fname,sfsize))
+ else:
+ fhtml.append('%s %s ' % (fname,fname,sfsize))
+ for logfname in logfiles: # expect at least tlog - if more
+ if os.path.abspath(logfname) == os.path.abspath(self.tlog): # handled later
+ sectionname = 'All tool run'
+ if (len(logfiles) > 1):
+ sectionname = 'Other'
+ ourpdfs = pdflist
+ else:
+ realname = os.path.basename(logfname)
+ sectionname = os.path.splitext(realname)[0].split('_')[0] # break in case _ added to log
+ ourpdfs = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] == sectionname]
+ pdflist = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] != sectionname] # remove
+ nacross = 1
+ npdf = len(ourpdfs)
+
+ if npdf > 0:
+ nacross = math.sqrt(npdf) ## int(round(math.log(npdf,2)))
+ if int(nacross)**2 != npdf:
+ nacross += 1
+ nacross = int(nacross)
+ width = min(400,int(1200/nacross))
+ html.append('%s images and outputs
' % sectionname)
+ html.append('(Click on a thumbnail image to download the corresponding original PDF image) ')
+ ntogo = nacross # counter for table row padding with empty cells
+ html.append('\n')
+ for i,paths in enumerate(ourpdfs):
+ fname,thumb = paths
+ s= """ \n""" % (fname,thumb,fname,width,fname)
+ if ((i+1) % nacross == 0):
+ s += ' \n'
+ ntogo = 0
+ if i < (npdf - 1): # more to come
+ s += ''
+ ntogo = nacross
+ else:
+ ntogo -= 1
+ html.append(s)
+ if html[-1].strip().endswith(' '):
+ html.append('
\n')
+ else:
+ if ntogo > 0: # pad
+ html.append(' '*ntogo)
+ html.append('\n')
+ logt = open(logfname,'r').readlines()
+ logtext = [x for x in logt if x.strip() > '']
+ html.append('%s log output
' % sectionname)
+ if len(logtext) > 1:
+ html.append('\n\n')
+ html += logtext
+ html.append('\n \n')
+ else:
+ html.append('%s is empty ' % logfname)
+ if len(fhtml) > 0:
+ fhtml.insert(0,'Output File Name (click to view) Size \n')
+ fhtml.append('
')
+ html.append('All output files available for downloading
\n')
+ html += fhtml # add all non-pdf files to the end of the display
+ else:
+ html.append('### Error - %s returned no files - please confirm that parameters are sane
' % self.opts.interpreter)
+ html.append(galhtmlpostfix)
+ htmlf = file(self.opts.output_html,'w')
+ htmlf.write('\n'.join(html))
+ htmlf.write('\n')
+ htmlf.close()
+ self.html = html
+
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/images/dynamicScriptTool.png
Binary file toolfactory/images/dynamicScriptTool.png has changed
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/rgToolFactory2.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/rgToolFactory2.py Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,746 @@
+#!/usr/bin/env python
+# rgToolFactory.py
+# see https://github.com/fubar2/toolfactory
+#
+# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012
+#
+# all rights reserved
+# Licensed under the LGPL
+# suggestions for improvement and bug fixes welcome at https://github.com/fubar2/toolfactory
+#
+# July 2020: BCC was fun and I feel like rip van winkle after 5 years.
+# Decided to
+# 1. Fix the toolfactory so it works - done for simplest case
+# 2. Fix planemo so the toolfactory function works
+# 3. Rewrite bits using galaxyxml functions where that makes sense - done
+#
+# removed all the old complications including making the new tool use this same script
+# galaxyxml now generates the tool xml https://github.com/hexylena/galaxyxml
+# No support for automatic HTML file creation from arbitrary outputs
+# essential problem is to create two command lines - one for the tool xml and a different
+# one to run the executable with the supplied test data and settings
+# It would be simpler to write the tool, then run it with planemo and soak up the test outputs.
+
+
+
+import argparse
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import time
+
+import galaxyxml.tool as gxt
+import galaxyxml.tool.parameters as gxtp
+
+import lxml
+
+myversion = "V2.1 July 2020"
+verbose = True
+debug = True
+toolFactoryURL = "https://github.com/fubar2/toolfactory"
+ourdelim = "~~~"
+
+# --input_files="$input_files~~~$CL~~~$input_formats~~~$input_label
+# ~~~$input_help"
+IPATHPOS = 0
+ICLPOS = 1
+IFMTPOS = 2
+ILABPOS = 3
+IHELPOS = 4
+IOCLPOS = 5
+
+# --output_files "$otab.history_name~~~$otab.history_format~~~$otab.CL
+ONAMEPOS = 0
+OFMTPOS = 1
+OCLPOS = 2
+OOCLPOS = 3
+
+# --additional_parameters="$i.param_name~~~$i.param_value~~~
+# $i.param_label~~~$i.param_help~~~$i.param_type~~~$i.CL~~~i$.param_CLoverride"
+ANAMEPOS = 0
+AVALPOS = 1
+ALABPOS = 2
+AHELPPOS = 3
+ATYPEPOS = 4
+ACLPOS = 5
+AOVERPOS = 6
+AOCLPOS = 7
+
+
+foo = len(lxml.__version__)
+# fug you, flake8. Say my name!
+
+def timenow():
+ """return current time as a string
+ """
+ return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time()))
+
+
+def quote_non_numeric(s):
+ """return a prequoted string for non-numerics
+ useful for perl and Rscript parameter passing?
+ """
+ try:
+ _ = float(s)
+ return s
+ except ValueError:
+ return '"%s"' % s
+
+
+html_escape_table = {"&": "&", ">": ">", "<": "<", "$": r"\$"}
+
+
+def html_escape(text):
+ """Produce entities within text."""
+ return "".join(html_escape_table.get(c, c) for c in text)
+
+
+def html_unescape(text):
+ """Revert entities within text. Multiple character targets so use replace"""
+ t = text.replace("&", "&")
+ t = t.replace(">", ">")
+ t = t.replace("<", "<")
+ t = t.replace("\\$", "$")
+ return t
+
+
+def parse_citations(citations_text):
+ """
+ """
+ citations = [c for c in citations_text.split("**ENTRY**") if c.strip()]
+ citation_tuples = []
+ for citation in citations:
+ if citation.startswith("doi"):
+ citation_tuples.append(("doi", citation[len("doi") :].strip()))
+ else:
+ citation_tuples.append(
+ ("bibtex", citation[len("bibtex") :].strip())
+ )
+ return citation_tuples
+
+
+class ScriptRunner:
+ """Wrapper for an arbitrary script
+ uses galaxyxml
+
+ """
+
+ def __init__(self, args=None):
+ """
+ prepare command line cl for running the tool here
+ and prepare elements needed for galaxyxml tool generation
+ """
+
+ self.infiles = [x.split(ourdelim) for x in args.input_files]
+ self.outfiles = [x.split(ourdelim) for x in args.output_files]
+ self.addpar = [x.split(ourdelim) for x in args.additional_parameters]
+ self.args = args
+ self.cleanuppar()
+ self.lastclredirect = None
+ self.lastxclredirect = None
+ self.cl = []
+ self.xmlcl = []
+ self.is_positional = self.args.parampass == "positional"
+ aCL = self.cl.append
+ assert args.parampass in [
+ "0",
+ "argparse",
+ "positional",
+ ], 'Parameter passing in args.parampass must be "0","positional" or "argparse"'
+ self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name)
+ self.tool_id = self.tool_name
+ if self.args.interpreter_name:
+ exe = "$runMe"
+ else:
+ exe = self.args.exe_package
+ assert (
+ exe is not None
+ ), "No interpeter or executable passed in - nothing to run so cannot build"
+ self.tool = gxt.Tool(
+ self.args.tool_name,
+ self.tool_id,
+ self.args.tool_version,
+ self.args.tool_desc,
+ exe,
+ )
+ self.tinputs = gxtp.Inputs()
+ self.toutputs = gxtp.Outputs()
+ self.testparam = []
+ if (
+ self.args.runmode == "Executable" or self.args.runmode == "system"
+ ): # binary - no need
+ aCL(self.args.exe_package) # this little CL will just run
+ else:
+ self.prepScript()
+ self.elog = "%s_error_log.txt" % self.tool_name
+ self.tlog = "%s_runner_log.txt" % self.tool_name
+
+ if self.args.parampass == "0":
+ self.clsimple()
+ else:
+ clsuffix = []
+ xclsuffix = []
+ for i, p in enumerate(self.infiles):
+ if p[IOCLPOS] == "STDIN":
+ appendme = [
+ p[IOCLPOS],
+ p[ICLPOS],
+ p[IPATHPOS],
+ "< %s" % p[IPATHPOS],
+ ]
+ xappendme = [
+ p[IOCLPOS],
+ p[ICLPOS],
+ p[IPATHPOS],
+ "< $%s" % p[ICLPOS],
+ ]
+ else:
+ appendme = [p[IOCLPOS], p[ICLPOS], p[IPATHPOS], ""]
+ xappendme = [p[IOCLPOS], p[ICLPOS], "$%s" % p[ICLPOS], ""]
+ clsuffix.append(appendme)
+ xclsuffix.append(xappendme)
+ # print('##infile i=%d, appendme=%s' % (i,appendme))
+ for i, p in enumerate(self.outfiles):
+ if p[OOCLPOS] == "STDOUT":
+ self.lastclredirect = [">", p[ONAMEPOS]]
+ self.lastxclredirect = [">", "$%s" % p[OCLPOS]]
+ else:
+ clsuffix.append([p[OOCLPOS], p[OCLPOS], p[ONAMEPOS], ""])
+ xclsuffix.append(
+ [p[OOCLPOS], p[OCLPOS], "$%s" % p[ONAMEPOS], ""]
+ )
+ for p in self.addpar:
+ clsuffix.append(
+ [p[AOCLPOS], p[ACLPOS], p[AVALPOS], p[AOVERPOS]]
+ )
+ xclsuffix.append(
+ [p[AOCLPOS], p[ACLPOS], '"$%s"' % p[ANAMEPOS], p[AOVERPOS]]
+ )
+ clsuffix.sort()
+ xclsuffix.sort()
+ self.xclsuffix = xclsuffix
+ self.clsuffix = clsuffix
+ if self.args.parampass == "positional":
+ self.clpositional()
+ else:
+ self.clargparse()
+
+ def prepScript(self):
+ aCL = self.cl.append
+ rx = open(self.args.script_path, "r").readlines()
+ rx = [x.rstrip() for x in rx]
+ rxcheck = [x.strip() for x in rx if x.strip() > ""]
+ assert len(rxcheck) > 0, "Supplied script is empty. Cannot run"
+ self.script = "\n".join(rx)
+ fhandle, self.sfile = tempfile.mkstemp(
+ prefix=self.tool_name, suffix="_%s" % (self.args.interpreter_name)
+ )
+ tscript = open(self.sfile, "w")
+ tscript.write(self.script)
+ tscript.close()
+ self.indentedScript = " %s" % "\n".join(
+ [" %s" % html_escape(x) for x in rx]
+ )
+ self.escapedScript = "%s" % "\n".join(
+ [" %s" % html_escape(x) for x in rx]
+ )
+ art = "%s.%s" % (self.tool_name, self.args.interpreter_name)
+ artifact = open(art, "wb")
+ if self.args.interpreter_name == "python":
+ artifact.write(bytes("#!/usr/bin/env python\n", "utf8"))
+ artifact.write(bytes(self.script, "utf8"))
+ artifact.close()
+ aCL(self.args.interpreter_name)
+ aCL(self.sfile)
+
+ def cleanuppar(self):
+ """ positional parameters are complicated by their numeric ordinal"""
+ for i, p in enumerate(self.infiles):
+ if self.args.parampass == "positional":
+ assert p[ICLPOS].isdigit(), (
+ "Positional parameters must be ordinal integers - got %s for %s"
+ % (p[ICLPOS], p[ILABPOS])
+ )
+ p.append(p[ICLPOS])
+ if p[ICLPOS].isdigit() or self.args.parampass == "0":
+ scl = "input%d" % (i + 1)
+ p[ICLPOS] = scl
+ self.infiles[i] = p
+ for i, p in enumerate(
+ self.outfiles
+ ): # trying to automagically gather using extensions
+ if self.args.parampass == "positional" and p[OCLPOS] != "STDOUT":
+ assert p[OCLPOS].isdigit(), (
+ "Positional parameters must be ordinal integers - got %s for %s"
+ % (p[OCLPOS], p[ONAMEPOS])
+ )
+ p.append(p[OCLPOS])
+ if p[OCLPOS].isdigit() or p[OCLPOS] == "STDOUT":
+ scl = p[ONAMEPOS]
+ p[OCLPOS] = scl
+ self.outfiles[i] = p
+ for i, p in enumerate(self.addpar):
+ if self.args.parampass == "positional":
+ assert p[ACLPOS].isdigit(), (
+ "Positional parameters must be ordinal integers - got %s for %s"
+ % (p[ACLPOS], p[ANAMEPOS])
+ )
+ p.append(p[ACLPOS])
+ if p[ACLPOS].isdigit():
+ scl = "input%s" % p[ACLPOS]
+ p[ACLPOS] = scl
+ self.addpar[i] = p
+
+ def clsimple(self):
+ """ no parameters - uses < and > for i/o
+
+ Builds both the local test command line (self.cl) and the XML
+ command line (self.xmlcl) as: script < first_input > first_output.
+ NOTE(review): the real cl redirects to outfiles[0][OCLPOS] while the
+ xml cl uses outfiles[0][ONAMEPOS] - after cleanuppar these appear to
+ hold the same history name; confirm before changing either.
+ """
+ aCL = self.cl.append
+ aCL("<")
+ aCL(self.infiles[0][IPATHPOS])
+ aCL(">")
+ aCL(self.outfiles[0][OCLPOS])
+ aXCL = self.xmlcl.append
+ aXCL("<")
+ aXCL("$%s" % self.infiles[0][ICLPOS])
+ aXCL(">")
+ aXCL("$%s" % self.outfiles[0][ONAMEPOS])
+
+ def clpositional(self):
+ # inputs in order then params
+ aCL = self.cl.append
+ for (o_v, k, v, koverride) in self.clsuffix:
+ if " " in v:
+ aCL("%s" % v)
+ else:
+ aCL(v)
+ aXCL = self.xmlcl.append
+ for (o_v, k, v, koverride) in self.xclsuffix:
+ aXCL(v)
+ if self.lastxclredirect:
+ aXCL(self.lastxclredirect[0])
+ aXCL(self.lastxclredirect[1])
+
+ def clargparse(self):
+ """ argparse style
+
+ Emits each parameter as a --name flag (-n for single-letter names)
+ followed by its value, onto both the XML command line and the local
+ test command line. A non-empty koverride replaces the generated
+ flag entirely.
+ """
+ aCL = self.cl.append
+ aXCL = self.xmlcl.append
+ # inputs then params in argparse named form
+ for (o_v, k, v, koverride) in self.xclsuffix:
+ if koverride > "":
+ k = koverride
+ elif len(k.strip()) == 1:
+ k = "-%s" % k
+ else:
+ k = "--%s" % k
+ aXCL(k)
+ aXCL(v)
+ # same flag-building logic repeated for the test command line
+ for (o_v, k, v, koverride) in self.clsuffix:
+ if koverride > "":
+ k = koverride
+ elif len(k.strip()) == 1:
+ k = "-%s" % k
+ else:
+ k = "--%s" % k
+ aCL(k)
+ aCL(v)
+
+ def getNdash(self, newname):
+ if self.is_positional:
+ ndash = 0
+ else:
+ ndash = 2
+ if len(newname) < 2:
+ ndash = 1
+ return ndash
+
+ def doXMLparam(self):
+ """flake8 made me do this..."""
+ for p in self.outfiles:
+ newname, newfmt, newcl, oldcl = p
+ ndash = self.getNdash(newcl)
+ aparm = gxtp.OutputData(newcl, format=newfmt, num_dashes=ndash)
+ aparm.positional = self.is_positional
+ if self.is_positional:
+ if oldcl == "STDOUT":
+ aparm.positional = 9999999
+ aparm.command_line_override = "> $%s" % newcl
+ else:
+ aparm.positional = int(oldcl)
+ aparm.command_line_override = "$%s" % newcl
+ self.toutputs.append(aparm)
+ tp = gxtp.TestOutput(
+ name=newcl, value="%s_sample" % newcl, format=newfmt
+ )
+ self.testparam.append(tp)
+ for p in self.infiles:
+ newname = p[ICLPOS]
+ newfmt = p[IFMTPOS]
+ ndash = self.getNdash(newname)
+ if not len(p[ILABPOS]) > 0:
+ alab = p[ICLPOS]
+ else:
+ alab = p[ILABPOS]
+ aninput = gxtp.DataParam(
+ newname,
+ optional=False,
+ label=alab,
+ help=p[IHELPOS],
+ format=newfmt,
+ multiple=False,
+ num_dashes=ndash,
+ )
+ aninput.positional = self.is_positional
+ self.tinputs.append(aninput)
+ tparm = gxtp.TestParam(name=newname, value="%s_sample" % newname)
+ self.testparam.append(tparm)
+ for p in self.addpar:
+ newname, newval, newlabel, newhelp, newtype, newcl, override, oldcl = p
+ if not len(newlabel) > 0:
+ newlabel = newname
+ ndash = self.getNdash(newname)
+ if newtype == "text":
+ aparm = gxtp.TextParam(
+ newname,
+ label=newlabel,
+ help=newhelp,
+ value=newval,
+ num_dashes=ndash,
+ )
+ elif newtype == "integer":
+ aparm = gxtp.IntegerParam(
+ newname,
+ label=newname,
+ help=newhelp,
+ value=newval,
+ num_dashes=ndash,
+ )
+ elif newtype == "float":
+ aparm = gxtp.FloatParam(
+ newname,
+ label=newname,
+ help=newhelp,
+ value=newval,
+ num_dashes=ndash,
+ )
+ else:
+ raise ValueError(
+ 'Unrecognised parameter type "%s" for\
+ additional parameter %s in makeXML'
+ % (newtype, newname)
+ )
+ aparm.positional = self.is_positional
+ if self.is_positional:
+ aninput.positional = int(oldcl)
+ self.tinputs.append(aparm)
+ self.tparm = gxtp.TestParam(newname, value=newval)
+ self.testparam.append(tparm)
+
+ def doNoXMLparam(self):
+ """Build the XML stanzas for the parampass == "0" case: exactly one
+ input read from STDIN and one output written to STDOUT, wired up
+ with explicit command_line_override redirects.
+ """
+ alab = self.infiles[0][ILABPOS]
+ # fall back to the CL name when no label was supplied
+ if len(alab) == 0:
+ alab = self.infiles[0][ICLPOS]
+ max1s = (
+ "Maximum one input if parampass is 0 - more than one input files supplied - %s"
+ % str(self.infiles)
+ )
+ assert len(self.infiles) == 1, max1s
+ newname = self.infiles[0][ICLPOS]
+ aninput = gxtp.DataParam(
+ newname,
+ optional=False,
+ label=alab,
+ help=self.infiles[0][IHELPOS],
+ format=self.infiles[0][IFMTPOS],
+ multiple=False,
+ num_dashes=0,
+ )
+ # the single input is fed to the script on stdin
+ aninput.command_line_override = "< $%s" % newname
+ aninput.positional = self.is_positional
+ self.tinputs.append(aninput)
+ tp = gxtp.TestParam(name=newname, value="%s_sample" % newname)
+ self.testparam.append(tp)
+ newname = self.outfiles[0][OCLPOS]
+ newfmt = self.outfiles[0][OFMTPOS]
+ anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0)
+ # the single output is captured from the script's stdout
+ anout.command_line_override = "> $%s" % newname
+ anout.positional = self.is_positional
+ self.toutputs.append(anout)
+ tp = gxtp.TestOutput(
+ name=newname, value="%s_sample" % newname, format=newfmt
+ )
+ self.testparam.append(tp)
+
+ def makeXML(self):
+ """
+ Create a Galaxy xml tool wrapper for the new script
+ Uses galaxyhtml
+ Hmmm. How to get the command line into correct order...
+
+ Writes <tool_name>.xml in the working directory. Requirements are
+ derived from the interpreter (or the conda exe package); the pasted
+ script is embedded as a configfile unless running an executable.
+ """
+ self.tool.command_line_override = self.xmlcl
+ if self.args.interpreter_name:
+ self.tool.interpreter = self.args.interpreter_name
+ if self.args.help_text:
+ helptext = open(self.args.help_text, "r").readlines()
+ # escape the help so it cannot break the generated XML
+ helptext = [html_escape(x) for x in helptext]
+ self.tool.help = "".join([x for x in helptext])
+ else:
+ self.tool.help = (
+ "Please ask the tool author (%s) for help \
+ as none was supplied at tool generation\n"
+ % (self.args.user_email)
+ )
+ self.tool.version_command = None # do not want
+ requirements = gxtp.Requirements()
+
+ if self.args.interpreter_name:
+ if self.args.interpreter_name == "python":
+ requirements.append(
+ gxtp.Requirement(
+ "package", "python", self.args.interpreter_version
+ )
+ )
+ elif self.args.interpreter_name not in ["bash", "sh"]:
+ # bash/sh are assumed present; everything else becomes a conda package
+ requirements.append(
+ gxtp.Requirement(
+ "package",
+ self.args.interpreter_name,
+ self.args.interpreter_version,
+ )
+ )
+ else:
+ if self.args.exe_package and self.args.parampass != "system":
+ requirements.append(
+ gxtp.Requirement(
+ "package",
+ self.args.exe_package,
+ self.args.exe_package_version,
+ )
+ )
+ self.tool.requirements = requirements
+ if self.args.parampass == "0":
+ self.doNoXMLparam()
+ else:
+ self.doXMLparam()
+ self.tool.outputs = self.toutputs
+ self.tool.inputs = self.tinputs
+ if self.args.runmode not in ["Executable", "system"]:
+ # embed the pasted script so the generated tool is self-contained
+ configfiles = gxtp.Configfiles()
+ configfiles.append(gxtp.Configfile(name="runMe", text=self.script))
+ self.tool.configfiles = configfiles
+ tests = gxtp.Tests()
+ test_a = gxtp.Test()
+ for tp in self.testparam:
+ test_a.append(tp)
+ tests.append(test_a)
+ self.tool.tests = tests
+ self.tool.add_comment(
+ "Created by %s at %s using the Galaxy Tool Factory."
+ % (self.args.user_email, timenow())
+ )
+ self.tool.add_comment("Source in git at: %s" % (toolFactoryURL))
+ self.tool.add_comment(
+ "Cite: Creating re-usable tools from scripts doi: \
+ 10.1093/bioinformatics/bts573"
+ )
+ exml = self.tool.export()
+ xf = open('%s.xml' % self.tool_name, "w")
+ xf.write(exml)
+ xf.write("\n")
+ xf.close()
+ # ready for the tarball
+
+ def makeTooltar(self):
+ """
+ a tool is a gz tarball with eg
+ /toolname/tool.xml /toolname/tool.py /toolname/test-data/test1_in.foo ...
+ NOTE names for test inputs and outputs are munged here so must
+ correspond to actual input and output names used on the generated cl
+
+ Runs the script first (self.run) so real outputs exist to seed the
+ generated tool's test-data; aborts without building if the run fails.
+ """
+ retval = self.run()
+ if retval:
+ sys.stderr.write(
+ "## Run failed. Cannot build yet. Please fix and retry"
+ )
+ sys.exit(1)
+ tdir = "tfout"
+ if not os.path.exists(tdir):
+ os.mkdir(tdir)
+ self.makeXML()
+ testdir = os.path.join(tdir, "test-data")
+ if not os.path.exists(testdir):
+ os.mkdir(testdir) # make tests directory
+ # copy each real input as <clname>_sample for the generated test
+ for p in self.infiles:
+ pth = p[IPATHPOS]
+ dest = os.path.join(testdir, "%s_sample" % p[ICLPOS])
+ shutil.copyfile(pth, dest)
+ # copy each real output both as a test sample and into the tool dir
+ for p in self.outfiles:
+ pth = p[OCLPOS]
+ if p[OOCLPOS] == "STDOUT" or self.args.parampass == "0":
+ pth = p[ONAMEPOS]
+ dest = os.path.join(testdir, "%s_sample" % p[ONAMEPOS])
+ shutil.copyfile(pth, dest)
+ dest = os.path.join(tdir, p[ONAMEPOS])
+ shutil.copyfile(pth, dest)
+ else:
+ pth = p[OCLPOS]
+ dest = os.path.join(testdir, "%s_sample" % p[OCLPOS])
+ shutil.copyfile(pth, dest)
+ dest = os.path.join(tdir, p[OCLPOS])
+ shutil.copyfile(pth, dest)
+
+ if os.path.exists(self.tlog) and os.stat(self.tlog).st_size > 0:
+ shutil.copyfile(self.tlog, os.path.join(testdir, "test1_log_outfiletxt"))
+ if self.args.runmode not in ["Executable", "system"]:
+ # ship the pasted script inside the tool directory
+ stname = os.path.join(tdir, "%s" % (self.sfile))
+ if not os.path.exists(stname):
+ shutil.copyfile(self.sfile, stname)
+ xreal = '%s.xml' % self.tool_name
+ xout = os.path.join(tdir,xreal)
+ shutil.copyfile(xreal, xout)
+ tarpath = "toolfactory_%s.tgz" % self.tool_name
+ tf = tarfile.open(tarpath, "w:gz")
+ tf.add(name=tdir, arcname=self.tool_name)
+ tf.close()
+ shutil.copyfile(tarpath, self.args.new_tool)
+ shutil.copyfile(xreal,"tool_xml.txt")
+ # collect run outputs and the generated xml for the job report
+ repdir = "TF_run_report_tempdir"
+ if not os.path.exists(repdir):
+ os.mkdir(repdir)
+ repoutnames = [x[OCLPOS] for x in self.outfiles]
+ with os.scandir('.') as outs:
+ for entry in outs:
+ if entry.name.endswith('.tgz') or not entry.is_file():
+ continue
+ if entry.name in repoutnames:
+ shutil.copyfile(entry.name,os.path.join(repdir,entry.name))
+ elif entry.name == "%s.xml" % self.tool_name:
+ shutil.copyfile(entry.name,os.path.join(repdir,"new_tool_xml"))
+ return retval
+
+ def run(self):
+ """
+ Some devteam tools have this defensive stderr read so I'm keeping with the faith
+ Feel free to update.
+
+ Executes the assembled command line, teeing stdout/stderr to log
+ files, and returns the subprocess return code. Empty logs are
+ removed afterwards so they do not clutter the history.
+ """
+ s = "run cl=%s" % str(self.cl)
+
+ logging.debug(s)
+ scl = " ".join(self.cl)
+ err = None
+ if self.args.parampass != "0":
+ ste = open(self.elog, "wb")
+ if self.lastclredirect:
+ sto = open(
+ self.lastclredirect[1], "wb"
+ ) # is name of an output file
+ else:
+ sto = open(self.tlog, "wb")
+ sto.write(
+ bytes(
+ "## Executing Toolfactory generated command line = %s\n"
+ % scl,
+ "utf8",
+ )
+ )
+ sto.flush()
+ p = subprocess.run(self.cl, shell=False, stdout=sto, stderr=ste)
+ sto.close()
+ ste.close()
+ # defensive chunked read of captured stderr
+ # NOTE(review): the file is opened "rb" and each chunk is wrapped
+ # with str(), so err accumulates "b'...'" reprs - confirm intended
+ tmp_stderr = open(self.elog, "rb")
+ err = ""
+ buffsize = 1048576
+ try:
+ while True:
+ err += str(tmp_stderr.read(buffsize))
+ if not err or len(err) % buffsize != 0:
+ break
+ except OverflowError:
+ pass
+ tmp_stderr.close()
+ retval = p.returncode
+ else: # work around special case of simple scripts that take stdin and write to stdout
+ sti = open(self.infiles[0][IPATHPOS], "rb")
+ sto = open(self.outfiles[0][ONAMEPOS], "wb")
+ # redirect via explicit stdin/stdout file handles - shell=False,
+ # so no shell-level redirection is available
+ p = subprocess.run(self.cl, shell=False, stdout=sto, stdin=sti)
+ retval = p.returncode
+ sto.close()
+ sti.close()
+ # drop empty logs so they do not appear as outputs
+ if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0:
+ os.unlink(self.tlog)
+ if os.path.isfile(self.elog) and os.stat(self.elog).st_size == 0:
+ os.unlink(self.elog)
+ if p.returncode != 0 and err: # problem
+ sys.stderr.write(err)
+ logging.debug("run done")
+ return retval
+
+
+def main():
+ """
+ This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml as:
+ rgBaseScriptWrapper.py --script_path "$scriptPath" --tool_name "foo" --interpreter "Rscript"
+
+ Parses the ToolFactory command line, validates that the caller is an
+ authorised admin with a usable script or executable, then either runs
+ the script or builds the full toolshed tarball.
+ """
+ parser = argparse.ArgumentParser()
+ a = parser.add_argument
+ a("--script_path", default="")
+ a("--tool_name", default=None)
+ a("--interpreter_name", default=None)
+ a("--interpreter_version", default=None)
+ a("--exe_package", default=None)
+ a("--exe_package_version", default=None)
+ a("--input_files", default=[], action="append")
+ a("--output_files", default=[], action="append")
+ a("--user_email", default="Unknown")
+ a("--bad_user", default=None)
+ a("--make_Tool", default=None)
+ a("--help_text", default=None)
+ a("--tool_desc", default=None)
+ a("--tool_version", default=None)
+ a("--citations", default=None)
+ a("--additional_parameters", action="append", default=[])
+ a("--edit_additional_parameters", action="store_true", default=False)
+ a("--parampass", default="positional")
+ a("--tfout", default="./tfout")
+ a("--new_tool", default="new_tool")
+ a("--runmode", default=None)
+ args = parser.parse_args()
+ # --bad_user is set by the calling tool xml when the user is not an admin
+ assert not args.bad_user, (
+ 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to "admin_users" in the Galaxy configuration file'
+ % (args.bad_user, args.bad_user)
+ )
+ assert (
+ args.tool_name
+ ), "## Tool Factory expects a tool name - eg --tool_name=DESeq"
+ assert (
+ args.interpreter_name or args.exe_package
+ ), "## Tool Factory wrapper expects an interpreter or an executable package"
+ assert args.exe_package or (
+ len(args.script_path) > 0 and os.path.isfile(args.script_path)
+ ), "## Tool Factory wrapper expects a script path - eg --script_path=foo.R if no executable"
+ args.input_files = [
+ x.replace('"', "").replace("'", "") for x in args.input_files
+ ]
+ # remove quotes we need to deal with spaces in CL params
+ for i, x in enumerate(args.additional_parameters):
+ args.additional_parameters[i] = args.additional_parameters[i].replace(
+ '"', ""
+ )
+ r = ScriptRunner(args)
+ if args.make_Tool:
+ retcode = r.makeTooltar()
+ else:
+ retcode = r.run()
+ if retcode:
+ sys.exit(retcode) # indicate failure to job runner
+
+
+# module entry point
+if __name__ == "__main__":
+ main()
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/rgToolFactory2.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/rgToolFactory2.xml Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,462 @@
+
+ Scripts into tools
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Yes, allow user to edit all additional parameters on the generated tool form
+ No - use the fixed values for all additional parameters - no user editing
+
+
+
+
+
+
+
+
+
+
+ text
+ integer
+ float
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ python
+ galaxyxml
+
+
+
+
+
+#if $interexe.interpreter != "Executable" and $interexe.interpreter != "system" :
+${interexe.dynScript}
+#else:
+$tool_name
+#end if
+
+
+ #if $makeMode.make_Tool == "yes":
+${makeMode.help_text}
+ #else
+$tool_name help goes here
+ #end if
+
+
+#if $makeMode.make_Tool == "yes":
+ #for $citation in $makeMode.citations:
+ #if $citation.citation_type.type == "bibtex":
+ **ENTRY**bibtex
+ ${citation.citation_type.bibtex}
+ #else
+ **ENTRY**doi
+ ${citation.citation_type.doi}
+ #end if
+ #end for
+#end if
+
+
+
+
+
+
+
+
+
+
+
+
+
+ An executable binary to be provided and managed by the Conda dependency management subsystem
+ python
+ Rscript
+ perl
+ bash
+ sh
+ A system executable found on the path such as awk/sed
+ for testing only - do not use me please
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Argparse style: passed in the form of '--[clname] [value]'
+ Positional: Passed in the order of positional ordinals '...foo.bam bar.idx zot.xls'
+ No parameters needed because tool reads selected input file from STDIN and writes STDOUT with new history output
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Generate a Galaxy ToolShed compatible toolshed.gz
+ No. Just run the script please
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ DOI
+ BibTeX
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ makeMode['make_Tool'] == "yes"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. class:: warningmark
+
+**Details and attribution**
+(see GTF_)
+
+**Local Admins ONLY**
+Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool.
+
+**If you find a bug**
+Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_
+
+**What it does**
+This tool optionally generates normal workflow compatible first class Galaxy tools
+
+Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form.
+Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user.
+Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example.
+
+Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime.
+These can be editable by the downstream user or baked in.
+
+When you run this tool, your executable or script and supplied parameter values will be run to produce a canonical
+set of outputs - these are used to construct a test for the new tool.
+
+If tool generation is required, a new tarball compatible with any Galaxy toolshed is created.
+It can be unpacked in your galaxy/tools directory and manually added to tool_conf.xml, or
+installed into any toolshed from where it can be installed into your Galaxy.
+
+
+.. class:: warningmark
+
+**Note to system administrators**
+This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personal Galaxy instances.
+Admin_users will have the power to do anything they want as the Galaxy user if you install this tool.
+
+.. class:: warningmark
+
+**Use on public servers** is STRONGLY discouraged for obvious reasons
+
+The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation.
+We recommend that you follow the good code hygiene practices associated with safe toolshed practices.
+
+Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing:
+
+::
+
+ # reverse order of text by row
+ import sys
+ inp = sys.argv[1]
+ outp = sys.argv[2]
+ i = open(inp,'r').readlines()
+ o = open(outp,'w')
+ for row in i:
+ rs = row.rstrip()
+ rs = list(rs)
+ rs.reverse()
+ o.write(''.join(rs))
+ o.write('\n')
+ o.close()
+
+With argparse style parameters:
+
+::
+
+ # reverse order of text by row
+ import argparse
+ parser = argparse.ArgumentParser()
+ a = parser.add_argument
+ a('--infile',default='')
+ a('--outfile',default=None)
+ args = parser.parse_args()
+ inp = args.infile
+ outp = args.outfile
+ i = open(inp,'r').readlines()
+ o = open(outp,'w')
+ for row in i:
+ rs = row.rstrip()
+ rs = list(rs)
+ rs.reverse()
+ o.write(''.join(rs))
+ o.write('\n')
+ o.close()
+
+
+Paper_ :
+
+Creating re-usable tools from scripts: The Galaxy Tool Factory
+Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
+Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
+
+**Licensing**
+
+Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012
+All rights reserved.
+Licensed under the LGPL_
+
+.. _LGPL: http://www.gnu.org/copyleft/lesser.html
+.. _GTF: https://github.com/fubar2/toolfactory
+.. _Paper: http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573
+
+
+
+
+ 10.1093/bioinformatics/bts573
+
+
+
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/test-data/input1_sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/test-data/input1_sample Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,166 @@
+*WARNING before you start*
+
+ Install this tool on a private Galaxy ONLY
+ Please NEVER on a public or production instance
+
+Updated august 2014 by John Chilton adding citation support
+
+Updated august 8 2014 to fix bugs reported by Marius van den Beek
+
+Please cite the resource at
+http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
+if you use this tool in your published work.
+
+**Short Story**
+
+This is an unusual Galaxy tool capable of generating new Galaxy tools.
+It works by exposing *unrestricted* and therefore extremely dangerous scripting
+to all designated administrators of the host Galaxy server, allowing them to
+run scripts in R, python, sh and perl over multiple selected input data sets,
+writing a single new data set as output.
+
+*You have a working r/python/perl/bash script or any executable with positional or argparse style parameters*
+
+It can be turned into an ordinary Galaxy tool in minutes, using a Galaxy tool.
+
+
+**Automated generation of new Galaxy tools for installation into any Galaxy**
+
+A test is generated using small sample test data inputs and parameter settings you supply.
+Once the test case outputs have been produced, they can be used to build a
+new Galaxy tool. The supplied script or executable is baked as a requirement
+into a new, ordinary Galaxy tool, fully workflow compatible out of the box.
+Generated tools are installed via a tool shed by an administrator
+and work exactly like all other Galaxy tools for your users.
+
+**More Detail**
+
+To use the ToolFactory, you should have prepared a script to paste into a
+text box, or have a package in mind and a small test input example ready to select from your history
+to test your new script.
+
+```planemo test rgToolFactory2.xml --galaxy_root ~/galaxy --test_data ~/galaxy/tools/tool_makers/toolfactory/test-data``` works for me
+
+There is an example in each scripting language on the Tool Factory form. You
+can just cut and paste these to try it out - remember to select the right
+interpreter please. You'll also need to create a small test data set using
+the Galaxy history add new data tool.
+
+If the script fails somehow, use the "redo" button on the tool output in
+your history to recreate the form complete with broken script. Fix the bug
+and execute again. Rinse, wash, repeat.
+
+Once the script runs sucessfully, a new Galaxy tool that runs your script
+can be generated. Select the "generate" option and supply some help text and
+names. The new tool will be generated in the form of a new Galaxy datatype
+*toolshed.gz* - as the name suggests, it's an archive ready to upload to a
+Galaxy ToolShed as a new tool repository.
+
+Once it's in a ToolShed, it can be installed into any local Galaxy server
+from the server administrative interface.
+
+Once the new tool is installed, local users can run it - each time, the script
+that was supplied when it was built will be executed with the input chosen
+from the user's history. In other words, the tools you generate with the
+ToolFactory run just like any other Galaxy tool,but run your script every time.
+
+Tool factory tools are perfect for workflow components. One input, one output,
+no variables.
+
+*To fully and safely exploit the awesome power* of this tool,
+Galaxy and the ToolShed, you should be a developer installing this
+tool on a private/personal/scratch local instance where you are an
+admin_user. Then, if you break it, you get to keep all the pieces see
+https://bitbucket.org/fubar/galaxytoolfactory/wiki/Home
+
+**Installation**
+This is a Galaxy tool. You can install it most conveniently using the
+administrative "Search and browse tool sheds" link. Find the Galaxy Main
+toolshed at https://toolshed.g2.bx.psu.edu/ and search for the toolfactory
+repository. Open it and review the code and select the option to install it.
+
+If you can't get the tool that way, the xml and py files here need to be
+copied into a new tools
+subdirectory such as tools/toolfactory Your tool_conf.xml needs a new entry
+pointing to the xml
+file - something like::
+
+
+
+If not already there,
+please add:
+
+to your local data_types_conf.xml.
+
+
+**Restricted execution**
+
+The tool factory tool itself will then be usable ONLY by admin users -
+people with IDs in admin_users in universe_wsgi.ini **Yes, that's right. ONLY
+admin_users can run this tool** Think about it for a moment. If allowed to
+run any arbitrary script on your Galaxy server, the only thing that would
+impede a miscreant bent on destroying all your Galaxy data would probably
+be lack of appropriate technical skills.
+
+**What it does**
+
+This is a tool factory for simple scripts in python, R and
+perl currently. Functional tests are automatically generated. How cool is that.
+
+LIMITED to simple scripts that read one input from the history. Optionally can
+write one new history dataset, and optionally collect any number of outputs
+into links on an autogenerated HTML index page for the user to navigate -
+useful if the script writes images and output files - pdf outputs are shown
+as thumbnails and R's bloated pdf's are shrunk with ghostscript so that and
+imagemagik need to be available.
+
+Generated tools can be edited and enhanced like any Galaxy tool, so start
+small and build up since a generated script gets you a serious leg up to a
+more complex one.
+
+**What you do**
+
+You paste and run your script, you fix the syntax errors and
+eventually it runs. You can use the redo button and edit the script before
+trying to rerun it as you debug - it works pretty well.
+
+Once the script works on some test data, you can generate a toolshed compatible
+gzip file containing your script ready to run as an ordinary Galaxy tool in
+a repository on your local toolshed. That means safe and largely automated
+installation in any production Galaxy configured to use your toolshed.
+
+**Generated tool Security**
+
+Once you install a generated tool, it's just
+another tool - assuming the script is safe. They just run normally and their
+user cannot do anything unusually insecure but please, practice safe toolshed.
+Read the code before you install any tool. Especially this one - it is really scary.
+
+**Send Code**
+
+Patches and suggestions welcome as bitbucket issues please?
+
+**Attribution**
+
+Creating re-usable tools from scripts: The Galaxy Tool Factory
+Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team
+Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573
+
+http://bioinformatics.oxfordjournals.org/cgi/reprint/bts573?ijkey=lczQh1sWrMwdYWJ&keytype=ref
+
+**Licensing**
+
+Copyright Ross Lazarus 2010
+ross lazarus at g mail period com
+
+All rights reserved.
+
+Licensed under the LGPL
+
+**Obligatory screenshot**
+
+http://bitbucket.org/fubar/galaxytoolmaker/src/fda8032fe989/images/dynamicScriptTool.png
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/test-data/output2_sample
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/test-data/output2_sample Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,165 @@
+*trats uoy erofeb GNINRAW*
+
+YLNO yxalaG etavirp a no loot siht llatsnI
+ecnatsni noitcudorp ro cilbup a no REVEN esaelP
+
+troppus noitatic gnidda notlihC nhoJ yb 4102 tsugua detadpU
+
+keeB ned nav suiraM yb detroper sgub xif ot 4102 8 tsugua detadpU
+
+ta ecruoser eht etic esaelP
+fer=epytyek&JWYdwMrWs1hQzcl=yekji?375stb/tnirper/igc/gro.slanruojdrofxo.scitamrofnioib//:ptth
+.krow dehsilbup ruoy ni loot siht esu uoy fi
+
+**yrotS trohS**
+
+.sloot yxalaG wen gnitareneg fo elbapac loot yxalaG lausunu na si sihT
+gnitpircs suoregnad ylemertxe erofereht dna *detcirtsernu* gnisopxe yb skrow tI
+ot meht gniwolla ,revres yxalaG tsoh eht fo srotartsinimda detangised lla ot
+,stes atad tupni detceles elpitlum revo lrep dna hs ,nohtyp ,R ni stpircs nur
+.tuptuo sa tes atad wen elgnis a gnitirw
+
+*sretemarap elyts esrapgra ro lanoitisop htiw elbatucexe yna ro tpircs hsab/lrep/nohtyp/r gnikrow a evah uoY*
+
+.loot yxalaG a gnisu ,setunim ni loot yxalaG yranidro na otni denrut eb nac tI
+
+
+**yxalaG yna otni noitallatsni rof sloot yxalaG wen fo noitareneg detamotuA**
+
+.ylppus uoy sgnittes retemarap dna stupni atad tset elpmas llams gnisu detareneg si tset A
+a dliub ot desu eb nac yeht ,decudorp neeb evah stuptuo esac tset eht ecnO
+tnemeriuqer a sa dekab si elbatucexe ro tpircs deilppus ehT .loot yxalaG wen
+.xob eht fo tuo elbitapmoc wolfkrow ylluf ,loot yxalaG yranidro ,wen a otni
+rotartsinimda na yb dehs loot a aiv dellatsni era sloot detareneG
+.sresu ruoy rof sloot yxalaG rehto lla ekil yltcaxe krow dna
+
+**liateD eroM**
+
+a otni etsap ot tpircs a deraperp evah dluohs uoy ,yrotcaFlooT eht esu oT
+yrotsih ruoy morf tceles ot ydaer elpmaxe tupni tset llams a dna dnim ni egakcap a evah ro ,xob txet
+.tpircs wen ruoy tset ot
+
+em rof skrow ```atad-tset/yrotcafloot/srekam_loot/sloot/yxalag/~ atad_tset-- yxalag/~ toor_yxalag-- lmx.2yrotcaFlooTgr tset omenalp```
+
+uoY .mrof yrotcaF looT eht no egaugnal gnitpircs hcae ni elpmaxe na si erehT
+thgir eht tceles ot rebmemer - tuo ti yrt ot eseht etsap dna tuc tsuj nac
+gnisu tes atad tset llams a etaerc ot deen osla ll'uoY .esaelp reterpretni
+.loot atad wen dda yrotsih yxalaG eht
+
+ni tuptuo loot eht no nottub "oder" eht esu ,wohemos sliaf tpircs eht fI
+gub eht xiF .tpircs nekorb htiw etelpmoc mrof eht etaercer ot yrotsih ruoy
+.taeper ,hsaw ,esniR .niaga etucexe dna
+
+tpircs ruoy snur taht loot yxalaG wen a ,yllufssecus snur tpircs eht ecnO
+dna txet pleh emos ylppus dna noitpo "etareneg" eht tceleS .detareneg eb nac
+epytatad yxalaG wen a fo mrof eht ni detareneg eb lliw loot wen ehT .seman
+a ot daolpu ot ydaer evihcra na s'ti ,stseggus eman eht sa - *zg.dehsloot*
+.yrotisoper loot wen a sa dehSlooT yxalaG
+
+revres yxalaG lacol yna otni dellatsni eb nac ti ,dehSlooT a ni s'ti ecnO
+.ecafretni evitartsinimda revres eht morf
+
+tpircs eht ,emit hcae - ti nur nac sresu lacol ,dellatsni si loot wen eht ecnO
+nesohc tupni eht htiw detucexe eb lliw tliub saw ti nehw deilppus saw taht
+eht htiw etareneg uoy sloot eht ,sdrow rehto nI .yrotsih s'resu eht morf
+.emit yreve tpircs ruoy nur tub,loot yxalaG rehto yna ekil tsuj nur yrotcaFlooT
+
+,tuptuo eno ,tupni enO .stnenopmoc wolfkrow rof tcefrep era sloot yrotcaf looT
+.selbairav on
+
+,loot siht fo *rewop emosewa eht tiolpxe ylefas dna ylluf oT*
+siht gnillatsni repoleved a eb dluohs uoy ,dehSlooT eht dna yxalaG
+na era uoy erehw ecnatsni lacol hctarcs/lanosrep/etavirp a no loot
+ees seceip eht lla peek ot teg uoy ,ti kaerb uoy fi ,nehT .resu_nimda
+emoH/ikiw/yrotcaflootyxalag/rabuf/gro.tekcubtib//:sptth
+
+**noitallatsnI**
+eht gnisu yltneinevnoc tsom ti llatsni nac uoY .loot yxalaG a si sihT
+niaM yxalaG eht dniF .knil "sdehs loot esworb dna hcraeS" evitartsinimda
+yrotcafloot eht rof hcraes dna /ude.usp.xb.2g.dehsloot//:sptth ta dehsloot
+.ti llatsni ot noitpo eht tceles dna edoc eht weiver dna ti nepO .yrotisoper
+
+eb ot deen ereh selif yp dna lmx eht ,yaw taht loot eht teg t'nac uoy fI
+sloot wen a otni deipoc
+yrtne wen a sdeen lmx.fnoc_loot ruoY yrotcafloot/sloot sa hcus yrotceridbus
+lmx eht ot gnitniop
+::ekil gnihtemos - elif
+
+>"sredliubloot"=di "sloot gnidliub looT"=eman noitces<
+>/"lmx.yrotcaFlooTgr/yrotcafloot"=elif loot<
+>noitces/<
+
+,ereht ydaerla ton fI
+:dda esaelp
+"yraniB:yranib.sepytatad.yxalag"=epyt "zg.dehsloot"=noisnetxe epytatad<
+>/ "eurT"=ssalcbus "pizg-x/trapitlum"=epytemim
+.lmx.fnoc_sepyt_atad lacol ruoy ot
+
+
+**noitucexe detcirtseR**
+
+- sresu nimda yb YLNO elbasu eb neht lliw flesti loot yrotcaf loot ehT
+YLNO .thgir s'taht ,seY** ini.igsw_esrevinu ni sresu_nimda ni sDI htiw elpoep
+ot dewolla fI .tnemom a rof ti tuoba knihT **loot siht nur nac sresu_nimda
+dluow taht gniht ylno eht ,revres yxalaG ruoy no tpircs yrartibra yna nur
+ylbaborp dluow atad yxalaG ruoy lla gniyortsed no tneb tnaercsim a edepmi
+.slliks lacinhcet etairporppa fo kcal eb
+
+**seod ti tahW**
+
+dna R ,nohtyp ni stpircs elpmis rof yrotcaf loot a si sihT
+.taht si looc woH .detareneg yllacitamotua era stset lanoitcnuF .yltnerruc lrep
+
+nac yllanoitpO .yrotsih eht morf tupni eno daer taht stpircs elpmis ot DETIMIL
+stuptuo fo rebmun yna tcelloc yllanoitpo dna ,tesatad yrotsih wen eno etirw
+- etagivan ot resu eht rof egap xedni LMTH detarenegotua na no sknil otni
+nwohs era stuptuo fdp - selif tuptuo dna segami setirw tpircs eht fi lufesu
+dna taht os tpircstsohg htiw knurhs era s'fdp detaolb s'R dna slianbmuht sa
+.elbaliava eb ot deen kigamegami
+
+trats os ,loot yxalaG yna ekil decnahne dna detide eb nac sloot detareneG
+a ot pu gel suoires a uoy steg tpircs detareneg a ecnis pu dliub dna llams
+.eno xelpmoc erom
+
+**od uoy tahW**
+
+dna srorre xatnys eht xif uoy ,tpircs ruoy nur dna etsap uoY
+erofeb tpircs eht tide dna nottub oder eht esu nac uoY .snur ti yllautneve
+.llew ytterp skrow ti - gubed uoy sa ti nurer ot gniyrt
+
+elbitapmoc dehsloot a etareneg nac uoy ,atad tset emos no skrow tpircs eht ecnO
+ni loot yxalaG yranidro na sa nur ot ydaer tpircs ruoy gniniatnoc elif pizg
+detamotua ylegral dna efas snaem tahT .dehsloot lacol ruoy no yrotisoper a
+.dehsloot ruoy esu ot derugifnoc yxalaG noitcudorp yna ni noitallatsni
+
+**ytiruceS loot detareneG**
+
+tsuj s'ti ,loot detareneg a llatsni uoy ecnO
+rieht dna yllamron nur tsuj yehT .efas si tpircs eht gnimussa - loot rehtona
+.dehsloot efas ecitcarp ,esaelp tub erucesni yllausunu gnihtyna od tonnac resu
+.yracs yllaer si ti - eno siht yllaicepsE .loot yna llatsni uoy erofeb edoc eht daeR
+
+**edoC dneS**
+
+?esaelp seussi tekcubtib sa emoclew snoitseggus dna sehctaP
+
+**noitubirttA**
+
+yrotcaF looT yxalaG ehT :stpircs morf sloot elbasu-er gnitaerC
+maeT yxalaG ehT ;nnameiZ kraM ;ipsaK ynotnA ;surazaL ssoR
+375stb/scitamrofnioib/3901.01 :iod ;2102 scitamrofnioiB
+
+fer=epytyek&JWYdwMrWs1hQzcl=yekji?375stb/tnirper/igc/gro.slanruojdrofxo.scitamrofnioib//:ptth
+
+**gnisneciL**
+
+0102 surazaL ssoR thgirypoC
+moc doirep liam g ta surazal ssor
+
+.devreser sthgir llA
+
+LPGL eht rednu desneciL
+
+**tohsneercs yrotagilbO**
+
+gnp.looTtpircScimanyd/segami/989ef2308adf/crs/rekamlootyxalag/rabuf/gro.tekcubtib//:ptth
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/test-data/pyrevpos.python
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/test-data/pyrevpos.python Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,13 @@
+# reverse order of text by row
+import sys
+inp = sys.argv[1]
+outp = sys.argv[2]
+i = open(inp,'r').readlines()
+o = open(outp,'w')
+for row in i:
+ rs = row.rstrip()
+ rs = list(rs)
+ rs.reverse()
+ o.write(''.join(rs))
+o.close()
+
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/test-data/test1_log.txt
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/test-data/test1_log.txt Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,1 @@
+## Executing Toolfactory generated command line = python /tmp/pyrevposq5dmcdy1.python /tmp/tmpqrksf8sd/files/5/b/9/dataset_5b952a86-87df-44ad-a415-ea549f3f0cee.dat output2
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/test-data/toolfactory_pyrevpos_tgz_sample
Binary file toolfactory/test-data/toolfactory_pyrevpos_tgz_sample has changed
diff -r b938475235e3 -r e7e9732ebed6 toolfactory/testtf.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/toolfactory/testtf.sh Sun Aug 16 08:51:14 2020 -0400
@@ -0,0 +1,2 @@
+planemo test --no_cleanup --no_dependency_resolution --skip_venv --galaxy_root ~/galaxy ~/galaxy/tools/tool_makers/toolfactory &>foo
+