# HG changeset patch # User fubar # Date 1619414726 0 # Node ID 2a46da701dde30ce3e319086f87fe3641fcb4eeb # Parent c4f192ec521c7492dd50c08d12e9526a1f050b1b Uploaded diff -r c4f192ec521c -r 2a46da701dde toolfactory/.github/workflows/commit.yml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/.github/workflows/commit.yml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,67 @@ +name: Galaxy Tool Linting and Tests for PR +# run planemo on a git repository containing a single tool +# as a github action. +# ross lazarus august 2020 +on: [pull_request,push] +env: + GALAXY_REPO: https://github.com/galaxyproject/galaxy + GALAXY_RELEASE: release_20.05 +jobs: + setup: + name: setup environment and python + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + steps: + - name: Print github context properties + run: | + echo 'event: ${{ github.event_name }}' + echo 'sha: ${{ github.sha }}' + echo 'ref: ${{ github.ref }}' + echo 'head_ref: ${{ github.head_ref }}' + echo 'base_ref: ${{ github.base_ref }}' + echo 'event.before: ${{ github.event.before }}' + echo 'event.after: ${{ github.event.after }}' + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/checkout@v2 + with: + # planemo does not seem to want to install the requirement galaxyxml + # into the venv it manages at tool testing so do it the old skool way + repository: 'galaxyproject/galaxy' + path: 'galaxy' + - name: make venv ready for this galaxy and planemo + run: | + python3 -m venv $GITHUB_WORKSPACE/galaxy/.venv + . $GITHUB_WORKSPACE/galaxy/.venv/bin/activate + pip install --upgrade pip + pip install wheel + pip install -r $GITHUB_WORKSPACE/galaxy/requirements.txt + - name: Upgrade pip + run: pip install --upgrade pip + # Install the `wheel` package so that when installing other packages which + # are not available as wheels, pip will build a wheel for them, which can be cached. + - name: Install wheel + run: pip install wheel + - name: Install Planemo and flake8 + run: pip install planemo flake8 flake8-import-order + # galaxyxml temporarily removed until PR accepted + - uses: actions/checkout@v2 + with: + fetch-depth: 1 + - name: flake8 *.py + run: flake8 --ignore=E501,E203,W503,C901 + - name: Planemo lint + run: planemo lint . + - name: Planemo test tool + run: planemo test --galaxy_root $GITHUB_WORKSPACE/galaxy --test_output tool_test_output.html --skip_venv --test_output_json tool_test_output.json --galaxy_python_version ${{ matrix.python-version }} . + - name: Copy artifacts into place + run: | + mkdir upload + mv tool_test_output.json tool_test_output.html upload/ + - uses: actions/upload-artifact@v2.0.1 + with: + name: 'All tool test results' + path: upload diff -r c4f192ec521c -r 2a46da701dde toolfactory/.gitignore --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/.gitignore Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff -r c4f192ec521c -r 2a46da701dde toolfactory/.shed.yml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/.shed.yml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,13 @@ +name: toolfactory +owner: fubar +description: ToolFactory - tool to make Galaxy tools ready for the toolshed +homepage_url: https://github.com/fubar2/toolfactory +long_description: | + ToolFactory - turn executable packages and R/python/perl/bash scripts into ordinary Galaxy tools + + Creating re-usable tools from scripts: The Galaxy Tool Factory Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team + Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573 +remote_repository_url: https://github.com/fubar2/toolfactory +type: tool_dependency_definition +categories: +- Tool Generators diff -r c4f192ec521c -r 2a46da701dde toolfactory/README.md --- a/toolfactory/README.md Tue Apr 20 05:30:52 2021 +0000 +++ b/toolfactory/README.md Mon Apr 26 05:25:26 2021 +0000 @@ -1,9 +1,7 @@ -## Breaking news! Docker container at https://github.com/fubar2/toolfactory-galaxy-docker recommended as at December 2020 +## Breaking news! Completely refactored ### New demonstration of planemo tool_factory command ![Planemo ToolFactory demonstration](images/lintplanemo-2021-01-08_18.02.45.mkv?raw=false "Demonstration inside Planemo") -## This is the original ToolFactory suitable for non-docker situations. Please use the docker container if you can because it's integrated with a Toolshed... - # WARNING Install this tool to a throw-away private Galaxy or Docker container ONLY! 
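The .shed.yml above is the metadata planemo reads when a repository like this is linted or pushed to a Tool Shed. As a rough, illustrative sketch only (not part of this changeset), the same subprocess pattern the scripts below use for planemo could drive that publishing step; the target name, directory and API key here are placeholders::

    import subprocess

    TOOL_DIR = "toolfactory"            # directory containing .shed.yml (assumed layout)
    SHED_KEY = "REPLACE_WITH_API_KEY"   # placeholder - never commit a real key

    # shed_lint checks .shed.yml and the tool XML; shed_update pushes the
    # repository to the configured Tool Shed.
    subprocess.run(["planemo", "shed_lint", TOOL_DIR], check=True)
    subprocess.run(
        ["planemo", "shed_update", "--shed_target", "toolshed",
         "--shed_key", SHED_KEY, TOOL_DIR],
        check=True,
    )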
diff -r c4f192ec521c -r 2a46da701dde toolfactory/ToolFactory.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory.py Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,1214 @@ + +# see https://github.com/fubar2/toolfactory +# +# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 +# +# all rights reserved +# Licensed under the LGPL +# suggestions for improvement and bug fixes welcome at +# https://github.com/fubar2/toolfactory +# +# April 2021: Refactored into two tools - generate and test/install +# as part of GTN tutorial development and biocontainer adoption +# The tester runs planemo on a non-tested archive, creates the test outputs +# and returns a new proper tool with test. +# The tester was generated from the ToolFactory_tester.py script + + +import argparse +import copy +import json +import logging +import os +import re +import shlex +import shutil +import subprocess +import sys +import tarfile +import tempfile +import time + +from bioblend import ConnectionError +from bioblend import galaxy +from bioblend import toolshed + +import galaxyxml.tool as gxt +import galaxyxml.tool.parameters as gxtp + +import lxml.etree as ET + +import yaml + +myversion = "V2.3 April 2021" +verbose = True +debug = True +toolFactoryURL = "https://github.com/fubar2/toolfactory" +FAKEEXE = "~~~REMOVE~~~ME~~~" +# need this until a PR/version bump to fix galaxyxml prepending the exe even +# with override. + + +def timenow(): + """return current time as a string""" + return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) + +cheetah_escape_table = {"$": "\\$", "#": "\\#"} + +def cheetah_escape(text): + """Produce entities within text.""" + return "".join([cheetah_escape_table.get(c, c) for c in text]) + +def parse_citations(citations_text): + """""" + citations = [c for c in citations_text.split("**ENTRY**") if c.strip()] + citation_tuples = [] + for citation in citations: + if citation.startswith("doi"): + citation_tuples.append(("doi", citation[len("doi") :].strip())) + else: + citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) + return citation_tuples + +class ToolTester(): + # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! + # if in a container possibly not so courageous. + # Fine on your own laptop but security red flag for most production instances + # uncompress passed tar, run planemo and rebuild a new tarball with tests + + def __init__(self, report_dir, in_tool_archive, new_tool_archive, include_tests, galaxy_root): + self.new_tool_archive = new_tool_archive + self.include_tests = include_tests + self.galaxy_root = galaxy_root + self.repdir = report_dir + assert in_tool_archive and tarfile.is_tarfile(in_tool_archive) + # this is not going to go well with arbitrary names. TODO introspect tool xml! 
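        # Illustrative sketch only (not exercised by this changeset): the TODO
        # above could be addressed by reading the tool id straight from the
        # first <tool> XML inside the archive rather than trusting the archive
        # directory name, e.g.
        #
        #   with tarfile.open(in_tool_archive, "r:*") as tf:
        #       for member in tf.getnames():
        #           if member.lower().endswith(".xml"):
        #               root = ET.parse(tf.extractfile(member)).getroot()
        #               if root.tag == "tool":
        #                   tool_id = root.get("id")
        #                   break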
+ tff = tarfile.open(in_tool_archive, "r:*") + flist = tff.getnames() + ourdir = os.path.commonpath(flist) # eg pyrevpos + self.tool_name = ourdir + ourxmls = [x for x in flist if x.lower().endswith('.xml') and os.path.split(x)[0] == ourdir] + # planemo_test/planemo_test.xml + assert len(ourxmls) > 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + tff.close() + self.update_tests(ourdir) + self.tooloutdir = ourdir + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + penv['HOME'] = os.path.join(self.galaxy_root,'planemo') + #penv["GALAXY_VIRTUAL_ENV"] = os.path.join(penv['HOME'],'.planemo','gx_venv_3.9') + penv["PIP_CACHE_DIR"] = os.path.join(self.galaxy_root,'pipcache') + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = os.path.join(self.repdir, f"{tool_name}_planemo_test_report.html") + cll = ["planemo", + "test", + #"--job_config_file", + # os.path.join(self.galaxy_root,"config","job_conf.xml"), + #"--galaxy_python_version", + #"3.9", + "--test_output", + os.path.abspath(tool_test_output), + "--galaxy_root", + self.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print("Call planemo cl =", cll) + p = subprocess.run( + cll, + capture_output=True, + encoding='utf8', + env = penv, + shell=False, + ) + return p + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = tarfile.open(newtar, "w:gz") + ttf.add(name=self.tooloutdir, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def move_One(self,scandir): + with os.scandir('.') as outs: + for entry in outs: + newname = entry.name + if not entry.is_file() or entry.name.endswith('_sample'): + continue + if not (entry.name.endswith('.html') or entry.name.endswith('.gz') or entry.name.endswith(".tgz")): + fname, ext = os.path.splitext(entry.name) + if len(ext) > 1: + newname = f"{fname}_{ext[1:]}.txt" + else: + newname = f"{fname}.txt" + dest = os.path.join(self.repdir, newname) + src = entry.name + shutil.copyfile(src, dest) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + self.move_One(self.tooloutdir) + self.move_One('.') + if self.include_tests: + self.move_One(self.testdir) + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + logf = open(f"%s_run_report" % (self.tool_name),'w') + logf.write("stdout:") + logf.write(capture.stdout) + logf.write("stderr:") + logf.write(capture.stderr) + + +class ToolConfUpdater(): + # update config/tool_conf.xml with a new tool unpacked in /tools + # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! 
+ # if in a container possibly not so courageous. + # Fine on your own laptop but security red flag for most production instances + + def __init__(self, args, tool_conf_path, new_tool_archive_path, new_tool_name, tool_dir): + self.args = args + self.tool_conf_path = tool_conf_path + self.our_name = 'ToolFactory' + tff = tarfile.open(new_tool_archive_path, "r:*") + flist = tff.getnames() + ourdir = os.path.commonpath(flist) # eg pyrevpos + self.tool_id = ourdir # they are the same for TF tools + ourxml = [x for x in flist if x.lower().endswith('.xml')] + res = tff.extractall(tool_dir) + tff.close() + self.update_toolconf(ourdir,ourxml) + + def install_deps(self): + gi = galaxy.GalaxyInstance(url=self.args.galaxy_url, key=self.args.galaxy_api_key) + x = gi.tools.install_dependencies(self.tool_id) + print(f"Called install_dependencies on {self.tool_id} - got {x}") + + def update_toolconf(self,ourdir,ourxml): # path is relative to tools + updated = False + tree = ET.parse(self.tool_conf_path) + root = tree.getroot() + hasTF = False + TFsection = None + for e in root.findall('section'): + if e.attrib['name'] == self.our_name: + hasTF = True + TFsection = e + if not hasTF: + TFsection = ET.Element('section') + root.insert(0,TFsection) # at the top! + our_tools = TFsection.findall('tool') + conf_tools = [x.attrib['file'] for x in our_tools] + for xml in ourxml: # may be > 1 + if not xml in conf_tools: # new + updated = True + ET.SubElement(TFsection, 'tool', {'file':xml}) + ET.indent(tree) + tree.write(self.tool_conf_path, pretty_print=True) + if False and self.args.packages and self.args.packages > '': + self.install_deps() + +class ScriptRunner: + """Wrapper for an arbitrary script + uses galaxyxml + + """ + + def __init__(self, args=None): # noqa + """ + prepare command line cl for running the tool here + and prepare elements needed for galaxyxml tool generation + """ + self.ourcwd = os.getcwd() + self.collections = [] + if len(args.collection) > 0: + try: + self.collections = [ + json.loads(x) for x in args.collection if len(x.strip()) > 1 + ] + except Exception: + print( + f"--collections parameter {str(args.collection)} is malformed - should be a dictionary" + ) + try: + self.infiles = [ + json.loads(x) for x in args.input_files if len(x.strip()) > 1 + ] + except Exception: + print( + f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary" + ) + try: + self.outfiles = [ + json.loads(x) for x in args.output_files if len(x.strip()) > 1 + ] + except Exception: + print( + f"--output_files parameter {args.output_files} is malformed - should be a dictionary" + ) + try: + self.addpar = [ + json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1 + ] + except Exception: + print( + f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary" + ) + try: + self.selpar = [ + json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1 + ] + except Exception: + print( + f"--selecttext_parameters {args.selecttext_parameters} is malformed - should be a dictionary" + ) + self.args = args + self.cleanuppar() + self.lastclredirect = None + self.lastxclredirect = None + self.cl = [] + self.xmlcl = [] + self.is_positional = self.args.parampass == "positional" + if self.args.sysexe: + if ' ' in self.args.sysexe: + self.executeme = self.args.sysexe.split(' ') + else: + self.executeme = [self.args.sysexe, ] + else: + if self.args.packages: + self.executeme = [self.args.packages.split(",")[0].split(":")[0].strip(), ] + 
else: + self.executeme = None + aCL = self.cl.append + aXCL = self.xmlcl.append + assert args.parampass in [ + "0", + "argparse", + "positional", + ], 'args.parampass must be "0","positional" or "argparse"' + self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name) + self.tool_id = self.tool_name + self.newtool = gxt.Tool( + self.tool_name, + self.tool_id, + self.args.tool_version, + self.args.tool_desc, + FAKEEXE, + ) + self.newtarpath = "%s_toolshed.gz" % self.tool_name + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.tinputs = gxtp.Inputs() + self.toutputs = gxtp.Outputs() + self.testparam = [] + if self.args.script_path: + self.prepScript() + if self.args.command_override: + scos = open(self.args.command_override, "r").readlines() + self.command_override = [x.rstrip() for x in scos] + else: + self.command_override = None + if self.args.test_override: + stos = open(self.args.test_override, "r").readlines() + self.test_override = [x.rstrip() for x in stos] + else: + self.test_override = None + if self.args.script_path: + for ex in self.executeme: + aCL(ex) + aXCL(ex) + aCL(self.sfile) + aXCL("$runme") + else: + for ex in self.executeme: + aCL(ex) + aXCL(ex) + + if self.args.parampass == "0": + self.clsimple() + else: + if self.args.parampass == "positional": + self.prepclpos() + self.clpositional() + else: + self.prepargp() + self.clargparse() + + def clsimple(self): + """no parameters or repeats - uses < and > for i/o""" + aCL = self.cl.append + aXCL = self.xmlcl.append + if len(self.infiles) > 0: + aCL("<") + aCL(self.infiles[0]["infilename"]) + aXCL("<") + aXCL("$%s" % self.infiles[0]["infilename"]) + if len(self.outfiles) > 0: + aCL(">") + aCL(self.outfiles[0]["name"]) + aXCL(">") + aXCL("$%s" % self.outfiles[0]["name"]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + aCL(c) + aXCL(c) + + def prepargp(self): + clsuffix = [] + xclsuffix = [] + for i, p in enumerate(self.infiles): + nam = p["infilename"] + if p["origCL"].strip().upper() == "STDIN": + appendme = [ + nam, + nam, + "< %s" % nam, + ] + xappendme = [ + nam, + nam, + "< $%s" % nam, + ] + else: + rep = p["repeat"] == "1" + over = "" + if rep: + over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' + appendme = [p["CL"], p["CL"], ""] + xappendme = [p["CL"], "$%s" % p["CL"], over] + clsuffix.append(appendme) + xclsuffix.append(xappendme) + for i, p in enumerate(self.outfiles): + if p["origCL"].strip().upper() == "STDOUT": + self.lastclredirect = [">", p["name"]] + self.lastxclredirect = [">", "$%s" % p["name"]] + else: + clsuffix.append([p["name"], p["name"], ""]) + xclsuffix.append([p["name"], "$%s" % p["name"], ""]) + for p in self.addpar: + nam = p["name"] + rep = p["repeat"] == "1" + if rep: + over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' + else: + over = p["override"] + clsuffix.append([p["CL"], nam, over]) + xclsuffix.append([p["CL"], '"$%s"' % nam, over]) + for p in self.selpar: + clsuffix.append([p["CL"], p["name"], p["override"]]) + xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) + self.xclsuffix = xclsuffix + self.clsuffix = clsuffix + + def prepclpos(self): + clsuffix = [] + xclsuffix = [] + for i, p in enumerate(self.infiles): + if 
p["origCL"].strip().upper() == "STDIN": + appendme = [ + "999", + p["infilename"], + "< $%s" % p["infilename"], + ] + xappendme = [ + "999", + p["infilename"], + "< $%s" % p["infilename"], + ] + else: + appendme = [p["CL"], p["infilename"], ""] + xappendme = [p["CL"], "$%s" % p["infilename"], ""] + clsuffix.append(appendme) + xclsuffix.append(xappendme) + for i, p in enumerate(self.outfiles): + if p["origCL"].strip().upper() == "STDOUT": + self.lastclredirect = [">", p["name"]] + self.lastxclredirect = [">", "$%s" % p["name"]] + else: + clsuffix.append([p["CL"], p["name"], ""]) + xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) + for p in self.addpar: + nam = p["name"] + rep = p["repeat"] == "1" # repeats make NO sense + if rep: + print(f'### warning. Repeats for {nam} ignored - not permitted in positional parameter command lines!') + over = p["override"] + clsuffix.append([p["CL"], nam, over]) + xclsuffix.append([p["CL"], '"$%s"' % nam, over]) + for p in self.selpar: + clsuffix.append([p["CL"], p["name"], p["override"]]) + xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) + clsuffix.sort() + xclsuffix.sort() + self.xclsuffix = xclsuffix + self.clsuffix = clsuffix + + def prepScript(self): + rx = open(self.args.script_path, "r").readlines() + rx = [x.rstrip() for x in rx] + rxcheck = [x.strip() for x in rx if x.strip() > ""] + assert len(rxcheck) > 0, "Supplied script is empty. Cannot run" + self.script = "\n".join(rx) + fhandle, self.sfile = tempfile.mkstemp( + prefix=self.tool_name, suffix="_%s" % (self.executeme[0]) + ) + tscript = open(self.sfile, "w") + tscript.write(self.script) + tscript.close() + self.spacedScript = [f" {x}" for x in rx if x.strip() > ""] + rx.insert(0,'#raw') + rx.append('#end raw') + self.escapedScript = rx + art = "%s.%s" % (self.tool_name, self.executeme[0]) + artifact = open(art, "wb") + artifact.write(bytes(self.script, "utf8")) + artifact.close() + + def cleanuppar(self): + """ positional parameters are complicated by their numeric ordinal""" + if self.args.parampass == "positional": + for i, p in enumerate(self.infiles): + assert ( + p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN" + ), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["label"], + ) + for i, p in enumerate(self.outfiles): + assert ( + p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT" + ), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["name"], + ) + for i, p in enumerate(self.addpar): + assert p[ + "CL" + ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( + p["CL"], + p["name"], + ) + for i, p in enumerate(self.infiles): + infp = copy.copy(p) + infp["origCL"] = infp["CL"] + if self.args.parampass in ["positional", "0"]: + infp["infilename"] = infp["label"].replace(" ", "_") + else: + infp["infilename"] = infp["CL"] + self.infiles[i] = infp + for i, p in enumerate(self.outfiles): + p["origCL"] = p["CL"] # keep copy + self.outfiles[i] = p + for i, p in enumerate(self.addpar): + p["origCL"] = p["CL"] + self.addpar[i] = p + + def clpositional(self): + # inputs in order then params + aCL = self.cl.append + for (k, v, koverride) in self.clsuffix: + if " " in v: + aCL("%s" % v) + else: + aCL(v) + aXCL = self.xmlcl.append + for (k, v, koverride) in self.xclsuffix: + aXCL(v) + if self.lastxclredirect: + aXCL(self.lastxclredirect[0]) + aXCL(self.lastxclredirect[1]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + 
aCL(c) + aXCL(c) + + + def clargparse(self): + """argparse style""" + aCL = self.cl.append + aXCL = self.xmlcl.append + # inputs then params in argparse named form + + for (k, v, koverride) in self.xclsuffix: + if koverride > "": + k = koverride + aXCL(k) + else: + if len(k.strip()) == 1: + k = "-%s" % k + else: + k = "--%s" % k + aXCL(k) + aXCL(v) + for (k, v, koverride) in self.clsuffix: + if koverride > "": + k = koverride + elif len(k.strip()) == 1: + k = "-%s" % k + else: + k = "--%s" % k + aCL(k) + aCL(v) + if self.lastxclredirect: + aXCL(self.lastxclredirect[0]) + aXCL(self.lastxclredirect[1]) + if self.args.cl_user_suffix: # DIY CL end + clp = shlex.split(self.args.cl_user_suffix) + for c in clp: + aCL(c) + aXCL(c) + + def getNdash(self, newname): + if self.is_positional: + ndash = 0 + else: + ndash = 2 + if len(newname) < 2: + ndash = 1 + return ndash + + def doXMLparam(self): # noqa + """Add all needed elements to tool""" + for p in self.outfiles: + newname = p["name"] + newfmt = p["format"] + newcl = p["CL"] + test = p["test"] + oldcl = p["origCL"] + test = test.strip() + ndash = self.getNdash(newcl) + aparm = gxtp.OutputData( + name=newname, format=newfmt, num_dashes=ndash, label=newname + ) + aparm.positional = self.is_positional + if self.is_positional: + if oldcl.upper() == "STDOUT": + aparm.positional = 9999999 + aparm.command_line_override = "> $%s" % newname + else: + aparm.positional = int(oldcl) + aparm.command_line_override = "$%s" % newname + self.toutputs.append(aparm) + ld = None + if test.strip() > "": + if test.startswith("diff"): + c = "diff" + ld = 0 + if test.split(":")[1].isdigit: + ld = int(test.split(":")[1]) + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + lines_diff=ld, + ) + elif test.startswith("sim_size"): + c = "sim_size" + tn = test.split(":")[1].strip() + if tn > "": + if "." 
in tn: + delta = None + delta_frac = min(1.0, float(tn)) + else: + delta = int(tn) + delta_frac = None + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + delta=delta, + delta_frac=delta_frac, + ) + else: + c = test + tp = gxtp.TestOutput( + name=newname, + value="%s_sample" % newname, + compare=c, + ) + self.testparam.append(tp) + for p in self.infiles: + newname = p["infilename"] + newfmt = p["format"] + ndash = self.getNdash(newname) + reps = p.get("repeat", "0") == "1" + if not len(p["label"]) > 0: + alab = p["CL"] + else: + alab = p["label"] + aninput = gxtp.DataParam( + newname, + optional=False, + label=alab, + help=p["help"], + format=newfmt, + multiple=False, + num_dashes=ndash, + ) + aninput.positional = self.is_positional + if self.is_positional: + if p["origCL"].upper() == "STDIN": + aninput.positional = 9999998 + aninput.command_line_override = "> $%s" % newname + else: + aninput.positional = int(p["origCL"]) + aninput.command_line_override = "$%s" % newname + if reps: + repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {alab} as needed") + repe.append(aninput) + self.tinputs.append(repe) + tparm = gxtp.TestRepeat(name=f"R_{newname}") + tparm2 = gxtp.TestParam(newname, value="%s_sample" % newname) + tparm.append(tparm2) + self.testparam.append(tparm) + else: + self.tinputs.append(aninput) + tparm = gxtp.TestParam(newname, value="%s_sample" % newname) + self.testparam.append(tparm) + for p in self.addpar: + newname = p["name"] + newval = p["value"] + newlabel = p["label"] + newhelp = p["help"] + newtype = p["type"] + newcl = p["CL"] + oldcl = p["origCL"] + reps = p["repeat"] == "1" + if not len(newlabel) > 0: + newlabel = newname + ndash = self.getNdash(newname) + if newtype == "text": + aparm = gxtp.TextParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "integer": + aparm = gxtp.IntegerParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "float": + aparm = gxtp.FloatParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + elif newtype == "boolean": + aparm = gxtp.BooleanParam( + newname, + label=newlabel, + help=newhelp, + value=newval, + num_dashes=ndash, + ) + else: + raise ValueError( + 'Unrecognised parameter type "%s" for\ + additional parameter %s in makeXML' + % (newtype, newname) + ) + aparm.positional = self.is_positional + if self.is_positional: + aparm.positional = int(oldcl) + if reps: + repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {newlabel} as needed") + repe.append(aparm) + self.tinputs.append(repe) + tparm = gxtp.TestRepeat(name=f"R_{newname}") + tparm2 = gxtp.TestParam(newname, value=newval) + tparm.append(tparm2) + self.testparam.append(tparm) + else: + self.tinputs.append(aparm) + tparm = gxtp.TestParam(newname, value=newval) + self.testparam.append(tparm) + for p in self.selpar: + newname = p["name"] + newval = p["value"] + newlabel = p["label"] + newhelp = p["help"] + newtype = p["type"] + newcl = p["CL"] + if not len(newlabel) > 0: + newlabel = newname + ndash = self.getNdash(newname) + if newtype == "selecttext": + newtext = p["texts"] + aparm = gxtp.SelectParam( + newname, + label=newlabel, + help=newhelp, + num_dashes=ndash, + ) + for i in range(len(newval)): + anopt = gxtp.SelectOption( + value=newval[i], + text=newtext[i], + ) + aparm.append(anopt) + aparm.positional = self.is_positional + if self.is_positional: + aparm.positional = 
int(newcl) + self.tinputs.append(aparm) + tparm = gxtp.TestParam(newname, value=newval) + self.testparam.append(tparm) + else: + raise ValueError( + 'Unrecognised parameter type "%s" for\ + selecttext parameter %s in makeXML' + % (newtype, newname) + ) + for p in self.collections: + newkind = p["kind"] + newname = p["name"] + newlabel = p["label"] + newdisc = p["discover"] + collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind) + disc = gxtp.DiscoverDatasets( + pattern=newdisc, directory=f"{newname}", visible="false" + ) + collect.append(disc) + self.toutputs.append(collect) + try: + tparm = gxtp.TestOutputCollection(newname) # broken until PR merged. + self.testparam.append(tparm) + except Exception: + print("#### WARNING: Galaxyxml version does not have the PR merged yet - tests for collections must be over-ridden until then!") + + def doNoXMLparam(self): + """filter style package - stdin to stdout""" + if len(self.infiles) > 0: + alab = self.infiles[0]["label"] + if len(alab) == 0: + alab = self.infiles[0]["infilename"] + max1s = ( + "Maximum one input if parampass is 0 but multiple input files supplied - %s" + % str(self.infiles) + ) + assert len(self.infiles) == 1, max1s + newname = self.infiles[0]["infilename"] + aninput = gxtp.DataParam( + newname, + optional=False, + label=alab, + help=self.infiles[0]["help"], + format=self.infiles[0]["format"], + multiple=False, + num_dashes=0, + ) + aninput.command_line_override = "< $%s" % newname + aninput.positional = True + self.tinputs.append(aninput) + tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) + self.testparam.append(tp) + if len(self.outfiles) > 0: + newname = self.outfiles[0]["name"] + newfmt = self.outfiles[0]["format"] + anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) + anout.command_line_override = "> $%s" % newname + anout.positional = self.is_positional + self.toutputs.append(anout) + tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname) + self.testparam.append(tp) + + def makeXML(self): # noqa + """ + Create a Galaxy xml tool wrapper for the new script + Uses galaxyhtml + Hmmm. How to get the command line into correct order... + """ + if self.command_override: + self.newtool.command_override = self.command_override # config file + else: + self.newtool.command_override = self.xmlcl + cite = gxtp.Citations() + acite = gxtp.Citation(type="doi", value="10.1093/bioinformatics/bts573") + cite.append(acite) + self.newtool.citations = cite + safertext = "" + if self.args.help_text: + helptext = open(self.args.help_text, "r").readlines() + safertext = "\n".join([cheetah_escape(x) for x in helptext]) + if len(safertext.strip()) == 0: + safertext = ( + "Ask the tool author (%s) to rebuild with help text please\n" + % (self.args.user_email) + ) + if self.args.script_path: + if len(safertext) > 0: + safertext = safertext + "\n\n------\n" # transition allowed! 
+ scr = [x for x in self.spacedScript if x.strip() > ""] + scr.insert(0, "\n\nScript::\n") + if len(scr) > 300: + scr = ( + scr[:100] + + [" >300 lines - stuff deleted", " ......"] + + scr[-100:] + ) + scr.append("\n") + safertext = safertext + "\n".join(scr) + self.newtool.help = safertext + self.newtool.version_command = f'echo "{self.args.tool_version}"' + std = gxtp.Stdios() + std1 = gxtp.Stdio() + std.append(std1) + self.newtool.stdios = std + requirements = gxtp.Requirements() + if self.args.packages: + try: + for d in self.args.packages.split(","): + ver = "" + d = d.replace("==", ":") + d = d.replace("=", ":") + if ":" in d: + packg, ver = d.split(":") + else: + packg = d + requirements.append( + gxtp.Requirement("package", packg.strip(), ver.strip()) + ) + except Exception: + print('### malformed packages string supplied - cannot parse =',self.args.packages) + sys.exit(2) + self.newtool.requirements = requirements + if self.args.parampass == "0": + self.doNoXMLparam() + else: + self.doXMLparam() + self.newtool.outputs = self.toutputs + self.newtool.inputs = self.tinputs + if self.args.script_path: + configfiles = gxtp.Configfiles() + configfiles.append( + gxtp.Configfile(name="runme", text="\n".join(self.escapedScript)) + ) + self.newtool.configfiles = configfiles + tests = gxtp.Tests() + test_a = gxtp.Test() + for tp in self.testparam: + test_a.append(tp) + tests.append(test_a) + self.newtool.tests = tests + self.newtool.add_comment( + "Created by %s at %s using the Galaxy Tool Factory." + % (self.args.user_email, timenow()) + ) + self.newtool.add_comment("Source in git at: %s" % (toolFactoryURL)) + exml0 = self.newtool.export() + exml = exml0.replace(FAKEEXE, "") # temporary work around until PR accepted + if ( + self.test_override + ): # cannot do this inside galaxyxml as it expects lxml objects for tests + part1 = exml.split("")[0] + part2 = exml.split("")[1] + fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2) + exml = fixed + # exml = exml.replace('range="1:"', 'range="1000:"') + xf = open("%s.xml" % self.tool_name, "w") + xf.write(exml) + xf.write("\n") + xf.close() + # ready for the tarball + + def run(self): #noqa + """ + generate test outputs by running a command line + won't work if command or test override in play - planemo is the + easiest way to generate test outputs for that case so is + automagically selected + """ + scl = " ".join(self.cl) + err = None + logname = f"{self.tool_name}_runner_log" + if self.args.parampass != "0": + if self.lastclredirect: + logf = open(self.lastclredirect[1], "wb") # is name of an output file + else: + logf = open(logname,'w') + logf.write("No dependencies so sending CL = '%s' to the fast direct runner instead of planemo to generate tests" % scl) + subp = subprocess.run( + self.cl, shell=False, stdout=logf, stderr=logf + ) + logf.close() + retval = subp.returncode + else: # work around special case - stdin and write to stdout + if len(self.infiles) > 0: + sti = open(self.infiles[0]["name"], "rb") + else: + sti = sys.stdin + if len(self.outfiles) > 0: + sto = open(self.outfiles[0]["name"], "wb") + else: + sto = sys.stdout + subp = subprocess.run( + self.cl, shell=False, stdout=sto, stdin=sti + ) + retval = subp.returncode + sto.close() + sti.close() + if retval != 0 and err: # problem + sys.stderr.write(err) + for p in self.outfiles: + oname = p["name"] + tdest = os.path.join(self.testdir, "%s_sample" % oname) + if not os.path.isfile(tdest): + if os.path.isfile(oname): + shutil.copyfile(oname, tdest) + dest = 
os.path.join(self.repdir, "%s.sample.%s" % (oname,p['format'])) + shutil.copyfile(oname, dest) + else: + if report_fail: + tout.write( + "###Tool may have failed - output file %s not found in testdir after planemo run %s." + % (oname, self.testdir) + ) + for p in self.infiles: + pth = p["name"] + dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + shutil.copyfile(pth, dest) + dest = os.path.join(self.repdir, "%s_sample.%s" % (p["infilename"],p["format"])) + shutil.copyfile(pth, dest) + with os.scandir('.') as outs: + for entry in outs: + newname = entry.name + if not entry.is_file() or entry.name.endswith('_sample'): + continue + if not (entry.name.endswith('.html') or entry.name.endswith('.gz') or entry.name.endswith(".tgz")): + fname, ext = os.path.splitext(entry.name) + if len(ext) > 1: + newname = f"{fname}_{ext[1:]}.txt" + else: + newname = f"{fname}.txt" + dest = os.path.join(self.repdir, newname) + src = entry.name + shutil.copyfile(src, dest) + return retval + + def writeShedyml(self): + """for planemo""" + yuser = self.args.user_email.split("@")[0] + yfname = os.path.join(self.tooloutdir, ".shed.yml") + yamlf = open(yfname, "w") + odict = { + "name": self.tool_name, + "owner": yuser, + "type": "unrestricted", + "description": self.args.tool_desc, + "synopsis": self.args.tool_desc, + "category": "TF Generated Tools", + } + yaml.dump(odict, yamlf, allow_unicode=True) + yamlf.close() + + def makeTool(self): + """write xmls and input samples into place""" + if self.args.parampass == 0: + self.doNoXMLparam() + else: + self.makeXML() + if self.args.script_path: + stname = os.path.join(self.tooloutdir, self.sfile) + if not os.path.exists(stname): + shutil.copyfile(self.sfile, stname) + xreal = "%s.xml" % self.tool_name + xout = os.path.join(self.tooloutdir, xreal) + shutil.copyfile(xreal, xout) + for p in self.infiles: + pth = p["name"] + dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + shutil.copyfile(pth, dest) + dest = os.path.join(self.repdir, "%s_sample.%s" % (p["infilename"],p["format"])) + shutil.copyfile(pth, dest) + + def makeToolTar(self, report_fail=False): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + for p in self.outfiles: + oname = p["name"] + tdest = os.path.join(self.testdir, "%s_sample" % oname) + src = os.path.join(self.testdir, oname) + if not os.path.isfile(tdest): + if os.path.isfile(src): + shutil.copyfile(src, tdest) + dest = os.path.join(self.repdir, "%s.sample" % (oname)) + shutil.copyfile(src, dest) + else: + if report_fail: + print( + "###Tool may have failed - output file %s not found in testdir after planemo run %s." 
+ % (tdest, self.testdir) + ) + tf = tarfile.open(self.newtarpath, "w:gz") + tf.add( + name=self.tooloutdir, + arcname=self.tool_name, + filter=exclude_function, + ) + tf.close() + shutil.copyfile(self.newtarpath, self.args.new_tool) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if not entry.name.endswith('.html'): + _, ext = os.path.splitext(entry.name) + newname = f"{entry.name.replace('.','_')}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + if self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + +def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--script_path", default=None) + a("--history_test", default=None) + a("--cl_user_suffix", default=None) + a("--sysexe", default=None) + a("--packages", default=None) + a("--tool_name", default="newtool") + a("--tool_dir", default=None) + a("--input_files", default=[], action="append") + a("--output_files", default=[], action="append") + a("--user_email", default="Unknown") + a("--bad_user", default=None) + a("--help_text", default=None) + a("--tool_desc", default=None) + a("--tool_version", default=None) + a("--citations", default=None) + a("--command_override", default=None) + a("--test_override", default=None) + a("--additional_parameters", action="append", default=[]) + a("--selecttext_parameters", action="append", default=[]) + a("--edit_additional_parameters", action="store_true", default=False) + a("--parampass", default="positional") + a("--tfout", default="./tfout") + a("--new_tool", default="new_tool") + a("--galaxy_root", default="/galaxy-central") + a("--galaxy_venv", default="/galaxy_venv") + a("--collection", action="append", default=[]) + a("--include_tests", default=False, action="store_true") + a("--install", default=False, action="store_true") + a("--run_test", default=False, action="store_true") + a("--local_tools", default='tools') # relative to galaxy_root + a("--tool_conf_path", default='/galaxy_root/config/tool_conf.xml') + a("--galaxy_url", default="http://localhost:8080") + a("--toolshed_url", default="http://localhost:9009") + # make sure this is identical to tool_sheds_conf.xml + # localhost != 127.0.0.1 so validation fails + a("--toolshed_api_key", default="fakekey") + a("--galaxy_api_key", default="8993d65865e6d6d1773c2c34a1cc207d") + args = parser.parse_args() + assert not args.bad_user, ( + 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ +admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' + % (args.bad_user, args.bad_user) + ) + assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" + assert ( + args.sysexe or args.packages + ), "## Tool Factory wrapper expects an 
interpreter \ +or an executable package in --sysexe or --packages" + print('Hello from',os.getcwd()) + r = ScriptRunner(args) + r.writeShedyml() + r.makeTool() + r.makeToolTar() + if args.run_test: + if not args.packages or args.packages.strip() == "bash": + r.run() + r.makeToolTar() + else: + tt = ToolTester(report_dir=r.repdir, in_tool_archive=r.newtarpath, new_tool_archive=r.args.new_tool, galaxy_root=args.galaxy_root, include_tests=False) + if args.install: + #try: + tcu = ToolConfUpdater(args=args, tool_dir=os.path.join(args.galaxy_root,args.local_tools), + new_tool_archive_path=r.newtarpath, tool_conf_path=os.path.join(args.galaxy_root,'config','tool_conf.xml'), + new_tool_name=r.tool_name) + #except Exception: + # print("### Unable to install the new tool. Are you sure you have all the required special settings?") + +if __name__ == "__main__": + main() + diff -r c4f192ec521c -r 2a46da701dde toolfactory/ToolFactory.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory.xml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,562 @@ + + Scripts into tools v2.0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + galaxyxml + bioblend + + + 0: +--cl_user_suffix "$cl_suffix" + #end if + #if $cover.commover == "yes": + #if len(str($cover.command_override)) > 10: +--command_override "$commandoverride" + #end if + #if len(str($cover.test_override)) > 10: +--test_override "$testoverride" + #end if + #end if + #if $deps.packages > "": + --packages "$deps.packages" + #end if + #if $deps.usescript.choosescript == "yes": +--script_path "$runme" +--sysexe "$deps.usescript.scriptrunner" + #end if +--tool_name "$tool_name" --user_email "$__user_email__" --citations "$citeme" --parampass "$io_param.ppass.parampass" +--tool_desc "$tool_desc" +--tool_version "$tool_version" +--help_text "$helpme" +--new_tool "$new_tool" + #if $install: + --install + #end if + #if $do_test.run_test: + --run_test + #end if + #if $io_param.ppass.parampass != '0': + #if str($io_param.ppass.addparam.edit_params) == "yes": +--edit_additional_parameters + #end if + #for $apar in $io_param.ppass.addparam.additional_parameters: + #if $apar.ap_type.param_type=="selecttext": +--selecttext_parameters '{"name":"$apar.param_name", "label":"$apar.param_label", "help":"$apar.param_help", +"type":"$apar.ap_type.param_type","CL":"$apar.param_CL","override":"$apar.param_CLprefixed","value": [ + #for $i,$st in enumerate($apar.ap_type.selectTexts): + "$st.select_value" + #if ($i < (len($apar.ap_type.selectTexts)-1)): + , + #end if + #end for + ], "texts": [ + #for $i,$st in enumerate($apar.ap_type.selectTexts): + "$st.select_text" + #if ($i < (len($apar.ap_type.selectTexts)-1)): + , + #end if + + #end for + ] + }' + #else: +--additional_parameters '{"name": "$apar.param_name", "value": "$apar.ap_type.param_value", "label": "$apar.param_label", "help": "$apar.param_help", +"type": "$apar.ap_type.param_type","CL": "$apar.param_CL","override": "$apar.param_CLprefixed", "repeat": "$apar.param_repeat"}' + #end if + #end for + #end if + #for $intab in $io_param.ppass.io.history_inputs: +--input_files '{"name": "$intab.input_files", "CL": "$intab.input_CL", "format": "$intab.input_formats", "label": "$intab.input_label", "help": "$intab.input_help", "repeat": "$intab.input_repeat"}' + #end for + #for $otab in $io_param.ppass.io.history_outputs: +--output_files '{"name": "$otab.history_name", "format": "$otab.history_format", "CL": "$otab.history_CL", "test": "$otab.history_test"}' + #end for + #for $collect in $io_param.ppass.io.collection_outputs: +--collection '{"name": "$collect.name", "kind": "$collect.kind", "discover": "$collect.discover", "label": "$collect.label"}' + #end for +--galaxy_root "$__root_dir__" +--tool_dir "$__tool_directory__" + #end if +]]> + + +$deps.usescript.dynScript + + +#if $cover.commover == "yes" and len(str($cover.command_override).strip()) > 1: +$cover.command_override +#end if + + +#if $cover.commover == "yes" and len(str($cover.test_override).strip()) > 1: +$cover.test_override +#end if + + +${help_text} + + + +#for $citation in $citations: + #if $citation.citation_type.type == "bibtex": + **ENTRY**bibtex + ${citation.citation_type.bibtex} + #else + **ENTRY**doi + ${citation.citation_type.doi} + #end if +#end for + + + + + + + + + + + + +
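Each repeated --input_files, --output_files, --additional_parameters and --collection flag built by the command template above carries one small JSON dictionary, and ToolFactory.py simply json.loads every appended string. A minimal illustration of that round trip, with invented field values::

    import json

    # One --input_files value shaped like the Cheetah template above emits it
    # (the concrete values here are made up for illustration).
    raw = ('{"name": "infile", "CL": "1", "format": "tabular", '
           '"label": "Input table", "help": "", "repeat": "0"}')
    p = json.loads(raw)
    assert p["CL"] == "1" and p["format"] == "tabular"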
+ + + + do_test['run_test'] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +.. class:: warningmark + +**Details and attribution** +(see GTF_) + +**Local Admins ONLY** +Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool. + +**If you find a bug** +Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_ + +**What it does** +This tool optionally generates normal workflow compatible first class Galaxy tools + +Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form. +Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user. +Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example. + +Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime. +These can be editable by the downstream user or baked in. + +A new tarball compatible with any Galaxy toolshed is created in your history, however, it does not have the test prepared. +There is a separate tool you can run to update the archive after testing with Planemo + +If the configuration in job_conf.xml allows tools to write to [galaxy_root]/tools, the new tool will be installed so you can view and test it locally. + +.. class:: warningmark + +**Note to system administrators** +This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personnal Galaxy instances. +Admin_users will have the power to do anything they want as the Galaxy user if you install this tool. + +.. class:: warningmark + +**Use on public servers** is STRONGLY discouraged for obvious reasons + +The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation. +We recommend that you follow the good code hygiene practices associated with safe toolshed practices. + +Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing: + +:: + + # reverse order of text by row + import sys + inp = sys.argv[1] + outp = sys.argv[2] + i = open(inp,'r').readlines() + o = open(outp,'w') + for row in i: + rs = row.rstrip() + rs = list(rs) + rs.reverse() + o.write(''.join(rs)) + o.write('\n') + o.close() + +With argparse style parameters: + +:: + + # reverse order of text by row + import argparse + parser = argparse.ArgumentParser() + a = parser.add_argument + a('--infile',default='') + a('--outfile',default=None) + args = parser.parse_args() + inp = args.infile + outp = args.outfile + i = open(inp,'r').readlines() + o = open(outp,'w') + for row in i: + rs = row.rstrip() + rs = list(rs) + rs.reverse() + o.write(''.join(rs)) + o.write('\n') + o.close() + +R script to draw some plots - use a collection. 
+ +:: + + + \# note this script takes NO input because it generates random data + dir.create('plots') + for (i in 1:10) { + foo = runif(100) + bar = rnorm(100) + bar = foo + 0.05*bar + pdf(paste('plots/yet',i,"anotherplot.pdf",sep='_')) + plot(foo,bar,main=paste("Foo by Bar plot \#",i),col="maroon", pch=3,cex=0.6) + dev.off() + foo = data.frame(a=runif(100),b=runif(100),c=runif(100),d=runif(100),e=runif(100),f=runif(100)) + bar = as.matrix(foo) + pdf(paste('plots/yet',i,"anotherheatmap.pdf",sep='_')) + heatmap(bar,main='Random Heatmap') + dev.off() + } + + + +Paper_ + +*Licensing* + +Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012 +All rights reserved. +Licensed under the LGPL_ + +.. _LGPL: http://www.gnu.org/copyleft/lesser.html +.. _GTF: https://github.com/fubar2/toolfactory +.. _Paper: https://academic.oup.com/bioinformatics/article/28/23/3139/192853 + + + + + 10.1093/bioinformatics/bts573 + +
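The help above mentions a separate tool for adding tests to the archive after generation; ToolFactory_tester.py, added next, does that by shelling out to planemo. Per tool XML it runs roughly the following (paths and names here are placeholders)::

    import os
    import subprocess

    cmd = [
        "planemo", "test",
        "--galaxy_root", "/path/to/galaxy",                  # placeholder
        "--test_output", "mytool_planemo_test_report.html",  # placeholder
        "--update_test_data",
        os.path.abspath("mytool/mytool.xml"),                # placeholder
    ]
    subprocess.run(cmd, capture_output=True, encoding="utf8", shell=False)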
+ + diff -r c4f192ec521c -r 2a46da701dde toolfactory/ToolFactory_tester.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory_tester.py Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,202 @@ +# see https://github.com/fubar2/toolfactory +# +# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 +# +# all rights reserved +# Licensed under the LGPL +# suggestions for improvement and bug fixes welcome at +# https://github.com/fubar2/toolfactory +# +# July 2020: BCC was fun and I feel like rip van winkle after 5 years. +# Decided to +# 1. Fix the toolfactory so it works - done for simplest case +# 2. Fix planemo so the toolfactory function works +# 3. Rewrite bits using galaxyxml functions where that makes sense - done + +import argparse +import copy +import os +import subprocess +import shutil +import sys +import tarfile +import tempfile +import time + +myversion = "V2.2 April 2021" +verbose = True +debug = True +toolFactoryURL = "https://github.com/fubar2/toolfactory" + +def timenow(): + """return current time as a string""" + return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) + +class ToolTester(): + # requires highly insecure docker settings - like write to tool_conf.xml and to tools ! + # if in a container possibly not so courageous. + # Fine on your own laptop but security red flag for most production instances + # uncompress passed tar, run planemo and rebuild a new tarball with tests + + def __init__(self, args=None, in_tool_archive='/galaxy-central/tools/newtool/newtool_toolshed.gz', new_tool_archive=None): + self.args = args + self.new_tool_archive = new_tool_archive + assert tarfile.is_tarfile(in_tool_archive) + # this is not going to go well with arbitrary names. TODO introspect tool xml! 
+ self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + tff = tarfile.open(in_tool_archive, "r:*") + flist = tff.getnames() + ourdir = os.path.commonpath(flist) # eg pyrevpos + self.tool_name = ourdir + ourxmls = [x for x in flist if x.lower().endswith('.xml') and os.path.split(x)[0] == ourdir] + assert len(ourxmls) > 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + tff.close() + self.update_tests(ourdir) + self.makeTool() + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + penv['HOME'] = '/home/ross/galaxy-release_21.01' + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = f"{tool_name}_planemo_test_report.html" + cll = [ + "planemo", + "test", + "--test_output", + os.path.abspath(tool_test_output), + "--galaxy_root", + self.args.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print(cll) + p = subprocess.run( + cll, + capture_output=True, + encoding='utf8', + env = penv, + shell=False, + ) + return p + + def makeTool(self): + """write xmls and input samples into place""" + for xreal in self.ourxmls: + x = os.path.split(xreal)[1] + xout = os.path.join(self.tooloutdir,x) + shutil.copyfile(xreal, xout) + # for p in self.infiles: + # pth = p["name"] + # dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + # dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = tarfile.open(newtar, "w:gz") + ttf.add(name=self.tooloutdir, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + with os.scandir('.') as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src =entry.name + shutil.copyfile(src, dest) + if True or self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." 
in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + #sys.stderr.write('%s, stdout=%s, stderr=%s' % (xmlf, capture.stdout, capture.stdout)) + print('%s, stdout=%s, stderr=%s' % (capture.stdout, capture.stdout,xmlf)) + +def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--in_tool_archive", default=None) + a("--new_tested_tool_archive", default=None) + a("--galaxy_root", default="/home/ross/galaxy-release_21.01/") + args = parser.parse_args() + print('Hello from',os.getcwd()) + tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + +if __name__ == "__main__": + main() diff -r c4f192ec521c -r 2a46da701dde toolfactory/ToolFactory_tester.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/ToolFactory_tester.xml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,455 @@ + + + + Test an untested tool and update it + + git + planemo + + + + + +$tf_archive_tester_log; +]]> + + 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + self.update_tests(ourdir) + tff.close() + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.makeTool() + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + #penv['HOME'] = os.path.join(self.args.galaxy_root,'planemo') + #penv["GALAXY_VIRTUAL_ENV"] = os.path.join(penv['HOME'],'.planemo','gx_venv_3.9') + penv["PIP_CACHE_DIR"] = os.path.join(self.args.galaxy_root,'pipcache') + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = f"{tool_name}_planemo_test_report.html" + cll = [ + "planemo", + "test", + "--biocontainers", + "--test_output", + os.path.abspath(tool_test_output), + "--galaxy_root", + self.args.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print(cll) + p = subprocess.run( + cll, + #capture_output=True, + encoding='utf8', + env = penv, + shell=False, + ) + return p + + def makeTool(self): + """write xmls and input samples into place""" + for xreal in self.ourxmls: + x = os.path.split(xreal)[1] + xout = os.path.join(self.tooloutdir,x) + shutil.copyfile(xreal, xout) + # for p in self.infiles: + # pth = p["name"] + # dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + # dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = 
tarfile.open(newtar, "w:gz") + ttf.add(name=self.tool_name, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + with os.scandir('.') as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src =entry.name + shutil.copyfile(src, dest) + if True or self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + #sys.stderr.write('%s, stdout=%s, stderr=%s' % (xmlf, capture.stdout, capture.stdout)) + #print('%s, stdout=%s, stderr=%s' % (capture.stdout, capture.stdout,xmlf)) + +def main(): + """ + This is a Galaxy wrapper. 
+ It expects to be called by a special purpose tool.xml + + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--in_tool_archive", default=None) + a("--new_tested_tool_archive", default=None) + a("--galaxy_root", default="/home/ross/gal21/") + args = parser.parse_args() + print('Hello from',os.getcwd()) + tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + +if __name__ == "__main__": + main() + + +#end raw]]> + + + + + + + + + + + + + + + + + + 0 + self.ourxmls = ourxmls # [os.path.join(tool_path,x) for x in ourxmls] + res = tff.extractall() + self.update_tests(ourdir) + tff.close() + self.tooloutdir = "./tfout" + self.repdir = "./TF_run_report" + self.testdir = os.path.join(self.tooloutdir, "test-data") + if not os.path.exists(self.tooloutdir): + os.mkdir(self.tooloutdir) + if not os.path.exists(self.testdir): + os.mkdir(self.testdir) + if not os.path.exists(self.repdir): + os.mkdir(self.repdir) + self.makeTool() + self.moveRunOutputs() + self.makeToolTar() + + def call_planemo(self,xmlpath,ourdir): + penv = os.environ + penv['HOME'] = '/home/ross/galaxy-release_21.01' + toolfile = os.path.split(xmlpath)[1] + tool_name = self.tool_name + tool_test_output = f"{tool_name}_planemo_test_report.html" + cll = [ + "planemo", + "test", + "--test_output", + os.path.abspath(tool_test_output), + "--galaxy_root", + self.args.galaxy_root, + "--update_test_data", + os.path.abspath(xmlpath), + ] + print(cll) + p = subprocess.run( + cll, + capture_output=True, + encoding='utf8', + env = penv, + shell=False, + ) + return p + + def makeTool(self): + """write xmls and input samples into place""" + for xreal in self.ourxmls: + x = os.path.split(xreal)[1] + xout = os.path.join(self.tooloutdir,x) + shutil.copyfile(xreal, xout) + # for p in self.infiles: + # pth = p["name"] + # dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + # dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) + # shutil.copyfile(pth, dest) + + def makeToolTar(self): + """move outputs into test-data and prepare the tarball""" + excludeme = "_planemo_test_report.html" + def exclude_function(tarinfo): + filename = tarinfo.name + return None if filename.endswith(excludeme) else tarinfo + newtar = 'new_%s_toolshed.gz' % self.tool_name + ttf = tarfile.open(newtar, "w:gz") + ttf.add(name=self.tooloutdir, + arcname=self.tool_name, + filter=exclude_function) + ttf.close() + shutil.copyfile(newtar, self.new_tool_archive) + + def moveRunOutputs(self): + """need to move planemo or run outputs into toolfactory collection""" + with os.scandir(self.tooloutdir) as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.tooloutdir, entry.name) + shutil.copyfile(src, dest) + with os.scandir('.') as outs: + for entry in outs: + if not entry.is_file(): + continue + if "." 
in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src =entry.name + shutil.copyfile(src, dest) + if True or self.args.include_tests: + with os.scandir(self.testdir) as outs: + for entry in outs: + if (not entry.is_file()) or entry.name.endswith( + "_planemo_test_report.html" + ): + continue + if "." in entry.name: + _, ext = os.path.splitext(entry.name) + if ext in [".tgz", ".json"]: + continue + if ext in [".yml", ".xml", ".yaml"]: + newname = f"{entry.name.replace('.','_')}.txt" + else: + newname = entry.name + else: + newname = f"{entry.name}.txt" + dest = os.path.join(self.repdir, newname) + src = os.path.join(self.testdir, entry.name) + shutil.copyfile(src, dest) + + def update_tests(self,ourdir): + for xmlf in self.ourxmls: + capture = self.call_planemo(xmlf,ourdir) + #sys.stderr.write('%s, stdout=%s, stderr=%s' % (xmlf, capture.stdout, capture.stdout)) + print('%s, stdout=%s, stderr=%s' % (capture.stdout, capture.stdout,xmlf)) + + def main(): + """ + This is a Galaxy wrapper. + It expects to be called by a special purpose tool.xml + """ + parser = argparse.ArgumentParser() + a = parser.add_argument + a("--in_tool_archive", default=None) + a("--new_tested_tool_archive", default=None) + a("--galaxy_root", default="/home/ross/gal21/") + args = parser.parse_args() + print('Hello from',os.getcwd()) + tt = ToolTester(args=args, in_tool_archive=args.in_tool_archive, new_tool_archive=args.new_tested_tool_archive) + if __name__ == "__main__": + main() + +]]> + + 10.1093/bioinformatics/bts573 + + + diff -r c4f192ec521c -r 2a46da701dde toolfactory/images/TFasIDE.png Binary file toolfactory/images/TFasIDE.png has changed diff -r c4f192ec521c -r 2a46da701dde toolfactory/images/dynamicScriptTool.png Binary file toolfactory/images/dynamicScriptTool.png has changed diff -r c4f192ec521c -r 2a46da701dde toolfactory/images/hello_toolfactory_form.png Binary file toolfactory/images/hello_toolfactory_form.png has changed diff -r c4f192ec521c -r 2a46da701dde toolfactory/images/lintplanemo-2021-01-08_18.02.45.mkv Binary file toolfactory/images/lintplanemo-2021-01-08_18.02.45.mkv has changed diff -r c4f192ec521c -r 2a46da701dde toolfactory/install_tf_demos.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/install_tf_demos.py Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,42 @@ +import argparse +import urllib.request + +from bioblend import galaxy + +WF = "https://drive.google.com/uc?export=download&id=13xE8o7tucHGNA0qYkEP98FfUGl2wdOU5" +HIST = ( + "https://drive.google.com/uc?export=download&id=1V0ZN9ZBuqcGJvt2AP7s3g0q11uYEhdDB" +) +WF_FILE = "tf_workflow.ga" +HIST_FILE = "tf_history.tgz" + + +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "-g", "--galaxy", help="URL of target galaxy", default="http://localhost:9090" + ) + parser.add_argument("-a", "--key", help="Galaxy admin key", default=None) + return parser + + +def main(): + """ + load the planemo tool_factory demonstration history and tool generating workflow + fails in planemo served galaxies because there seems to be no user in trans? 
+ """ + args = _parser().parse_args() + urllib.request.urlretrieve(WF, WF_FILE) + urllib.request.urlretrieve(HIST, HIST_FILE) + assert args.key, "Need an administrative key for the target Galaxy supplied please" + gi = galaxy.GalaxyInstance( + url=args.galaxy, key=args.key, email="planemo@galaxyproject.org" + ) + x = gi.workflows.import_workflow_from_local_path(WF_FILE, publish=True) + print(f"installed {WF_FILE} Returned = {x}\n") + x = gi.histories.import_history(file_path=HIST_FILE) + print(f"installed {HIST_FILE} Returned = {x}\n") + + +if __name__ == "__main__": + main() diff -r c4f192ec521c -r 2a46da701dde toolfactory/maketf.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/maketf.sh Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,30 @@ +# if a new ubuntu image, will need a port mapped and add some basics first +# apt update ; apt install -y python3-dev python3-venv python3-wheel nano curl wget git python3-setuptools +TARGDIR="/galaxy-central" +PDIR="/planemo" +git clone --recursive https://github.com/fubar2/planemo.git $PDIR +mkdir -p $TARGDIR +curl -L -s https://github.com/galaxyproject/galaxy/archive/dev.tar.gz | tar xzf - --strip-components=1 -C $TARGDIR +cd $PDIR +mkdir mytools +python3 -m venv .venv +. .venv/bin/activate +python3 setup.py build +python3 setup.py install +planemo conda_init --conda_prefix $PDIR/con +/planemo/con/bin/conda init +. ~/.bashrc +/planemo/con/bin/conda activate base +/planemo/con/bin/conda install -y -c bioconda -c conda-forge configparser galaxyxml +# without this, planemo does not work in docker... No clue why but planemo goes all pear shaped +# but pip reports that it's missing - installing it explicitly seems to do some kind of magic +echo "Starting first run. This takes ages and includes building the Galaxy client. Be patient. Do something else for 20 minutes" +. $PDIR/.venv/bin/activate +planemo tool_factory --galaxy_root $TARGDIR --port 9090 --host 0.0.0.0 --conda_dependency_resolution --conda_auto_install +# planemo tool_factory --galaxy_root $TARGDIR --port 8080 --host 0.0.0.0 --conda_dependency_resolution --conda_auto_install +#planemo tool_factory --galaxy_root $TARGDIR --conda_prefix $PDIR/con --port 9090 --host 0.0.0.0 +# planemo serve --galaxy_root /galaxy-central/ --conda_prefix /planemo/con --port 8080 --host 0.0.0.0 --conda_dependency_resolution --conda_auto_install /planemo/.venv/lib/python3.8/site-packages/planemo-0.74.1-py3.8.egg/planemo_ext/tool_factory_2 +# planemo serve --galaxy_root /galaxy-central/ --port 8080 --host 0.0.0.0 --conda_dependency_resolution --conda_auto_install /planemo/.venv/lib/python3.8/site-packages/planemo-0.74.1-py3.8.egg/planemo_ext/tool_factory_2 +# planemo serve --galaxy_root $TARGDIR --port 8080 --host 0.0.0.0 --conda_dependency_resolution --conda_auto_install /usr/local/lib/python3.6/dist-packages/planemo-0.74.1-py3.6.egg/planemo_ext/tool_factory_2/ + +# host is needed to get -p 9090:9090 to work in docker. 
Default 127.0.0.1 doesn't redirect :(ls -l /tmp diff -r c4f192ec521c -r 2a46da701dde toolfactory/rgToolFactory2.py --- a/toolfactory/rgToolFactory2.py Tue Apr 20 05:30:52 2021 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1197 +0,0 @@ -# replace with shebang for biocontainer -# see https://github.com/fubar2/toolfactory -# -# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012 -# -# all rights reserved -# Licensed under the LGPL -# suggestions for improvement and bug fixes welcome at -# https://github.com/fubar2/toolfactory -# -# July 2020: BCC was fun and I feel like rip van winkle after 5 years. -# Decided to -# 1. Fix the toolfactory so it works - done for simplest case -# 2. Fix planemo so the toolfactory function works -# 3. Rewrite bits using galaxyxml functions where that makes sense - done - -import argparse -import copy -import json -import logging -import os -import re -import shlex -import shutil -import subprocess -import sys -import tarfile -import tempfile -import time - -from bioblend import ConnectionError -from bioblend import toolshed - -import galaxyxml.tool as gxt -import galaxyxml.tool.parameters as gxtp - -import lxml - -import yaml - -myversion = "V2.2 February 2021" -verbose = True -debug = True -toolFactoryURL = "https://github.com/fubar2/toolfactory" -foo = len(lxml.__version__) -FAKEEXE = "~~~REMOVE~~~ME~~~" -# need this until a PR/version bump to fix galaxyxml prepending the exe even -# with override. - - -def timenow(): - """return current time as a string""" - return time.strftime("%d/%m/%Y %H:%M:%S", time.localtime(time.time())) - - -cheetah_escape_table = {"$": "\\$", "#": "\\#"} - - -def cheetah_escape(text): - """Produce entities within text.""" - return "".join([cheetah_escape_table.get(c, c) for c in text]) - - -def parse_citations(citations_text): - """""" - citations = [c for c in citations_text.split("**ENTRY**") if c.strip()] - citation_tuples = [] - for citation in citations: - if citation.startswith("doi"): - citation_tuples.append(("doi", citation[len("doi") :].strip())) - else: - citation_tuples.append(("bibtex", citation[len("bibtex") :].strip())) - return citation_tuples - - -class ScriptRunner: - """Wrapper for an arbitrary script - uses galaxyxml - - """ - - def __init__(self, args=None): # noqa - """ - prepare command line cl for running the tool here - and prepare elements needed for galaxyxml tool generation - """ - self.ourcwd = os.getcwd() - self.collections = [] - if len(args.collection) > 0: - try: - self.collections = [ - json.loads(x) for x in args.collection if len(x.strip()) > 1 - ] - except Exception: - print( - f"--collections parameter {str(args.collection)} is malformed - should be a dictionary" - ) - try: - self.infiles = [ - json.loads(x) for x in args.input_files if len(x.strip()) > 1 - ] - except Exception: - print( - f"--input_files parameter {str(args.input_files)} is malformed - should be a dictionary" - ) - try: - self.outfiles = [ - json.loads(x) for x in args.output_files if len(x.strip()) > 1 - ] - except Exception: - print( - f"--output_files parameter {args.output_files} is malformed - should be a dictionary" - ) - try: - self.addpar = [ - json.loads(x) for x in args.additional_parameters if len(x.strip()) > 1 - ] - except Exception: - print( - f"--additional_parameters {args.additional_parameters} is malformed - should be a dictionary" - ) - try: - self.selpar = [ - json.loads(x) for x in args.selecttext_parameters if len(x.strip()) > 1 - ] - except Exception: - print( - 
f"--selecttext_parameters {args.selecttext_parameters} is malformed - should be a dictionary" - ) - self.args = args - self.cleanuppar() - self.lastclredirect = None - self.lastxclredirect = None - self.cl = [] - self.xmlcl = [] - self.is_positional = self.args.parampass == "positional" - if self.args.sysexe: - if ' ' in self.args.sysexe: - self.executeme = self.args.sysexe.split(' ') - else: - self.executeme = [self.args.sysexe, ] - else: - if self.args.packages: - self.executeme = [self.args.packages.split(",")[0].split(":")[0].strip(), ] - else: - self.executeme = None - aCL = self.cl.append - aXCL = self.xmlcl.append - assert args.parampass in [ - "0", - "argparse", - "positional", - ], 'args.parampass must be "0","positional" or "argparse"' - self.tool_name = re.sub("[^a-zA-Z0-9_]+", "", args.tool_name) - self.tool_id = self.tool_name - self.newtool = gxt.Tool( - self.tool_name, - self.tool_id, - self.args.tool_version, - self.args.tool_desc, - FAKEEXE, - ) - self.newtarpath = "%s_toolshed.gz" % self.tool_name - self.tooloutdir = "./tfout" - self.repdir = "./TF_run_report_tempdir" - self.testdir = os.path.join(self.tooloutdir, "test-data") - if not os.path.exists(self.tooloutdir): - os.mkdir(self.tooloutdir) - if not os.path.exists(self.testdir): - os.mkdir(self.testdir) - if not os.path.exists(self.repdir): - os.mkdir(self.repdir) - self.tinputs = gxtp.Inputs() - self.toutputs = gxtp.Outputs() - self.testparam = [] - if self.args.script_path: - self.prepScript() - if self.args.command_override: - scos = open(self.args.command_override, "r").readlines() - self.command_override = [x.rstrip() for x in scos] - else: - self.command_override = None - if self.args.test_override: - stos = open(self.args.test_override, "r").readlines() - self.test_override = [x.rstrip() for x in stos] - else: - self.test_override = None - if self.args.script_path: - for ex in self.executeme: - aCL(ex) - aXCL(ex) - aCL(self.sfile) - aXCL("$runme") - else: - for ex in self.executeme: - aCL(ex) - aXCL(ex) - - self.elog = os.path.join(self.repdir, "%s_error_log.txt" % self.tool_name) - self.tlog = os.path.join(self.repdir, "%s_runner_log.txt" % self.tool_name) - if self.args.parampass == "0": - self.clsimple() - else: - if self.args.parampass == "positional": - self.prepclpos() - self.clpositional() - else: - self.prepargp() - self.clargparse() - if self.args.cl_suffix: # DIY CL end - clp = shlex.split(self.args.cl_suffix) - for c in clp: - aCL(c) - aXCL(c) - - def clsimple(self): - """no parameters or repeats - uses < and > for i/o""" - aCL = self.cl.append - aXCL = self.xmlcl.append - if len(self.infiles) > 0: - aCL("<") - aCL(self.infiles[0]["infilename"]) - aXCL("<") - aXCL("$%s" % self.infiles[0]["infilename"]) - if len(self.outfiles) > 0: - aCL(">") - aCL(self.outfiles[0]["name"]) - aXCL(">") - aXCL("$%s" % self.outfiles[0]["name"]) - - def prepargp(self): - clsuffix = [] - xclsuffix = [] - for i, p in enumerate(self.infiles): - nam = p["infilename"] - if p["origCL"].strip().upper() == "STDIN": - appendme = [ - nam, - nam, - "< %s" % nam, - ] - xappendme = [ - nam, - nam, - "< $%s" % nam, - ] - else: - rep = p["repeat"] == "1" - over = "" - if rep: - over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' - appendme = [p["CL"], p["CL"], ""] - xappendme = [p["CL"], "$%s" % p["CL"], over] - clsuffix.append(appendme) - xclsuffix.append(xappendme) - for i, p in enumerate(self.outfiles): - if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] - self.lastxclredirect = [">", 
"$%s" % p["name"]] - else: - clsuffix.append([p["name"], p["name"], ""]) - xclsuffix.append([p["name"], "$%s" % p["name"], ""]) - for p in self.addpar: - nam = p["name"] - rep = p["repeat"] == "1" - if rep: - over = f'#for $rep in $R_{nam}:\n--{nam} "$rep.{nam}"\n#end for' - else: - over = p["override"] - clsuffix.append([p["CL"], nam, over]) - xclsuffix.append([p["CL"], nam, over]) - for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) - xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) - self.xclsuffix = xclsuffix - self.clsuffix = clsuffix - - def prepclpos(self): - clsuffix = [] - xclsuffix = [] - for i, p in enumerate(self.infiles): - if p["origCL"].strip().upper() == "STDIN": - appendme = [ - "999", - p["infilename"], - "< $%s" % p["infilename"], - ] - xappendme = [ - "999", - p["infilename"], - "< $%s" % p["infilename"], - ] - else: - appendme = [p["CL"], p["infilename"], ""] - xappendme = [p["CL"], "$%s" % p["infilename"], ""] - clsuffix.append(appendme) - xclsuffix.append(xappendme) - for i, p in enumerate(self.outfiles): - if p["origCL"].strip().upper() == "STDOUT": - self.lastclredirect = [">", p["name"]] - self.lastxclredirect = [">", "$%s" % p["name"]] - else: - clsuffix.append([p["CL"], p["name"], ""]) - xclsuffix.append([p["CL"], "$%s" % p["name"], ""]) - for p in self.addpar: - nam = p["name"] - rep = p["repeat"] == "1" # repeats make NO sense - if rep: - print(f'### warning. Repeats for {nam} ignored - not permitted in positional parameter command lines!') - over = p["override"] - clsuffix.append([p["CL"], nam, over]) - xclsuffix.append([p["CL"], '"$%s"' % nam, over]) - for p in self.selpar: - clsuffix.append([p["CL"], p["name"], p["override"]]) - xclsuffix.append([p["CL"], '"$%s"' % p["name"], p["override"]]) - clsuffix.sort() - xclsuffix.sort() - self.xclsuffix = xclsuffix - self.clsuffix = clsuffix - - def prepScript(self): - rx = open(self.args.script_path, "r").readlines() - rx = [x.rstrip() for x in rx] - rxcheck = [x.strip() for x in rx if x.strip() > ""] - assert len(rxcheck) > 0, "Supplied script is empty. 
Cannot run" - self.script = "\n".join(rx) - fhandle, self.sfile = tempfile.mkstemp( - prefix=self.tool_name, suffix="_%s" % (self.executeme[0]) - ) - tscript = open(self.sfile, "w") - tscript.write(self.script) - tscript.close() - self.escapedScript = [cheetah_escape(x) for x in rx] - self.spacedScript = [f" {x}" for x in rx if x.strip() > ""] - art = "%s.%s" % (self.tool_name, self.executeme[0]) - artifact = open(art, "wb") - artifact.write(bytes("\n".join(self.escapedScript), "utf8")) - artifact.close() - - def cleanuppar(self): - """ positional parameters are complicated by their numeric ordinal""" - if self.args.parampass == "positional": - for i, p in enumerate(self.infiles): - assert ( - p["CL"].isdigit() or p["CL"].strip().upper() == "STDIN" - ), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["label"], - ) - for i, p in enumerate(self.outfiles): - assert ( - p["CL"].isdigit() or p["CL"].strip().upper() == "STDOUT" - ), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["name"], - ) - for i, p in enumerate(self.addpar): - assert p[ - "CL" - ].isdigit(), "Positional parameters must be ordinal integers - got %s for %s" % ( - p["CL"], - p["name"], - ) - for i, p in enumerate(self.infiles): - infp = copy.copy(p) - infp["origCL"] = infp["CL"] - if self.args.parampass in ["positional", "0"]: - infp["infilename"] = infp["label"].replace(" ", "_") - else: - infp["infilename"] = infp["CL"] - self.infiles[i] = infp - for i, p in enumerate(self.outfiles): - p["origCL"] = p["CL"] # keep copy - self.outfiles[i] = p - for i, p in enumerate(self.addpar): - p["origCL"] = p["CL"] - self.addpar[i] = p - - def clpositional(self): - # inputs in order then params - aCL = self.cl.append - for (k, v, koverride) in self.clsuffix: - if " " in v: - aCL("%s" % v) - else: - aCL(v) - aXCL = self.xmlcl.append - for (k, v, koverride) in self.xclsuffix: - aXCL(v) - if self.lastxclredirect: - aXCL(self.lastxclredirect[0]) - aXCL(self.lastxclredirect[1]) - - def clargparse(self): - """argparse style""" - aCL = self.cl.append - aXCL = self.xmlcl.append - # inputs then params in argparse named form - - for (k, v, koverride) in self.xclsuffix: - if koverride > "": - k = koverride - aXCL(k) - else: - if len(k.strip()) == 1: - k = "-%s" % k - else: - k = "--%s" % k - aXCL(k) - aXCL(v) - for (k, v, koverride) in self.clsuffix: - if koverride > "": - k = koverride - elif len(k.strip()) == 1: - k = "-%s" % k - else: - k = "--%s" % k - aCL(k) - aCL(v) - if self.lastxclredirect: - aXCL(self.lastxclredirect[0]) - aXCL(self.lastxclredirect[1]) - - def getNdash(self, newname): - if self.is_positional: - ndash = 0 - else: - ndash = 2 - if len(newname) < 2: - ndash = 1 - return ndash - - def doXMLparam(self): - """Add all needed elements to tool""" # noqa - for p in self.outfiles: - newname = p["name"] - newfmt = p["format"] - newcl = p["CL"] - test = p["test"] - oldcl = p["origCL"] - test = test.strip() - ndash = self.getNdash(newcl) - aparm = gxtp.OutputData( - name=newname, format=newfmt, num_dashes=ndash, label=newname - ) - aparm.positional = self.is_positional - if self.is_positional: - if oldcl.upper() == "STDOUT": - aparm.positional = 9999999 - aparm.command_line_override = "> $%s" % newname - else: - aparm.positional = int(oldcl) - aparm.command_line_override = "$%s" % newname - self.toutputs.append(aparm) - ld = None - if test.strip() > "": - if test.startswith("diff"): - c = "diff" - ld = 0 - if test.split(":")[1].isdigit: - ld = 
int(test.split(":")[1]) - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - lines_diff=ld, - ) - elif test.startswith("sim_size"): - c = "sim_size" - tn = test.split(":")[1].strip() - if tn > "": - if "." in tn: - delta = None - delta_frac = min(1.0, float(tn)) - else: - delta = int(tn) - delta_frac = None - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - delta=delta, - delta_frac=delta_frac, - ) - else: - c = test - tp = gxtp.TestOutput( - name=newname, - value="%s_sample" % newname, - compare=c, - ) - self.testparam.append(tp) - for p in self.infiles: - newname = p["infilename"] - newfmt = p["format"] - ndash = self.getNdash(newname) - reps = p.get("repeat", "0") == "1" - if not len(p["label"]) > 0: - alab = p["CL"] - else: - alab = p["label"] - aninput = gxtp.DataParam( - newname, - optional=False, - label=alab, - help=p["help"], - format=newfmt, - multiple=False, - num_dashes=ndash, - ) - aninput.positional = self.is_positional - if self.is_positional: - if p["origCL"].upper() == "STDIN": - aninput.positional = 9999998 - aninput.command_line_override = "> $%s" % newname - else: - aninput.positional = int(p["origCL"]) - aninput.command_line_override = "$%s" % newname - if reps: - repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {alab} as needed") - repe.append(aninput) - self.tinputs.append(repe) - tparm = gxtp.TestRepeat(name=f"R_{newname}") - tparm2 = gxtp.TestParam(newname, value="%s_sample" % newname) - tparm.append(tparm2) - self.testparam.append(tparm) - else: - self.tinputs.append(aninput) - tparm = gxtp.TestParam(newname, value="%s_sample" % newname) - self.testparam.append(tparm) - for p in self.addpar: - newname = p["name"] - newval = p["value"] - newlabel = p["label"] - newhelp = p["help"] - newtype = p["type"] - newcl = p["CL"] - oldcl = p["origCL"] - reps = p["repeat"] == "1" - if not len(newlabel) > 0: - newlabel = newname - ndash = self.getNdash(newname) - if newtype == "text": - aparm = gxtp.TextParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "integer": - aparm = gxtp.IntegerParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "float": - aparm = gxtp.FloatParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - elif newtype == "boolean": - aparm = gxtp.BooleanParam( - newname, - label=newlabel, - help=newhelp, - value=newval, - num_dashes=ndash, - ) - else: - raise ValueError( - 'Unrecognised parameter type "%s" for\ - additional parameter %s in makeXML' - % (newtype, newname) - ) - aparm.positional = self.is_positional - if self.is_positional: - aparm.positional = int(oldcl) - if reps: - repe = gxtp.Repeat(name=f"R_{newname}", title=f"Add as many {newlabel} as needed") - repe.append(aparm) - self.tinputs.append(repe) - tparm = gxtp.TestRepeat(name=f"R_{newname}") - tparm2 = gxtp.TestParam(newname, value=newval) - tparm.append(tparm2) - self.testparam.append(tparm) - else: - self.tinputs.append(aparm) - tparm = gxtp.TestParam(newname, value=newval) - self.testparam.append(tparm) - for p in self.selpar: - newname = p["name"] - newval = p["value"] - newlabel = p["label"] - newhelp = p["help"] - newtype = p["type"] - newcl = p["CL"] - if not len(newlabel) > 0: - newlabel = newname - ndash = self.getNdash(newname) - if newtype == "selecttext": - newtext = p["texts"] - aparm = gxtp.SelectParam( - newname, - label=newlabel, - help=newhelp, - 
num_dashes=ndash, - ) - for i in range(len(newval)): - anopt = gxtp.SelectOption( - value=newval[i], - text=newtext[i], - ) - aparm.append(anopt) - aparm.positional = self.is_positional - if self.is_positional: - aparm.positional = int(newcl) - self.tinputs.append(aparm) - tparm = gxtp.TestParam(newname, value=newval) - self.testparam.append(tparm) - else: - raise ValueError( - 'Unrecognised parameter type "%s" for\ - selecttext parameter %s in makeXML' - % (newtype, newname) - ) - for p in self.collections: - newkind = p["kind"] - newname = p["name"] - newlabel = p["label"] - newdisc = p["discover"] - collect = gxtp.OutputCollection(newname, label=newlabel, type=newkind) - disc = gxtp.DiscoverDatasets( - pattern=newdisc, directory=f"{newname}", visible="false" - ) - collect.append(disc) - self.toutputs.append(collect) - try: - tparm = gxtp.TestOutputCollection(newname) # broken until PR merged. - self.testparam.append(tparm) - except Exception: - print("#### WARNING: Galaxyxml version does not have the PR merged yet - tests for collections must be over-ridden until then!") - - def doNoXMLparam(self): - """filter style package - stdin to stdout""" - if len(self.infiles) > 0: - alab = self.infiles[0]["label"] - if len(alab) == 0: - alab = self.infiles[0]["infilename"] - max1s = ( - "Maximum one input if parampass is 0 but multiple input files supplied - %s" - % str(self.infiles) - ) - assert len(self.infiles) == 1, max1s - newname = self.infiles[0]["infilename"] - aninput = gxtp.DataParam( - newname, - optional=False, - label=alab, - help=self.infiles[0]["help"], - format=self.infiles[0]["format"], - multiple=False, - num_dashes=0, - ) - aninput.command_line_override = "< $%s" % newname - aninput.positional = True - self.tinputs.append(aninput) - tp = gxtp.TestParam(name=newname, value="%s_sample" % newname) - self.testparam.append(tp) - if len(self.outfiles) > 0: - newname = self.outfiles[0]["name"] - newfmt = self.outfiles[0]["format"] - anout = gxtp.OutputData(newname, format=newfmt, num_dashes=0) - anout.command_line_override = "> $%s" % newname - anout.positional = self.is_positional - self.toutputs.append(anout) - tp = gxtp.TestOutput(name=newname, value="%s_sample" % newname) - self.testparam.append(tp) - - def makeXML(self): # noqa - """ - Create a Galaxy xml tool wrapper for the new script - Uses galaxyhtml - Hmmm. How to get the command line into correct order... - """ - if self.command_override: - self.newtool.command_override = self.command_override # config file - else: - self.newtool.command_override = self.xmlcl - cite = gxtp.Citations() - acite = gxtp.Citation(type="doi", value="10.1093/bioinformatics/bts573") - cite.append(acite) - self.newtool.citations = cite - safertext = "" - if self.args.help_text: - helptext = open(self.args.help_text, "r").readlines() - safertext = "\n".join([cheetah_escape(x) for x in helptext]) - if len(safertext.strip()) == 0: - safertext = ( - "Ask the tool author (%s) to rebuild with help text please\n" - % (self.args.user_email) - ) - if self.args.script_path: - if len(safertext) > 0: - safertext = safertext + "\n\n------\n" # transition allowed! 
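# A minimal illustrative sketch (not part of the patch): the cheetah_escape helper
# defined near the top of this file is what keeps "$" and "#" in pasted help text
# and scripts from being interpreted by Cheetah when the generated XML is rendered.
cheetah_escape_table = {"$": "\\$", "#": "\\#"}

def cheetah_escape(text):
    """Escape Cheetah-significant characters in text."""
    return "".join(cheetah_escape_table.get(c, c) for c in text)

assert cheetah_escape('echo "$HOME" # done') == 'echo "\\$HOME" \\# done'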
- scr = [x for x in self.spacedScript if x.strip() > ""] - scr.insert(0, "\n\nScript::\n") - if len(scr) > 300: - scr = ( - scr[:100] - + [" >300 lines - stuff deleted", " ......"] - + scr[-100:] - ) - scr.append("\n") - safertext = safertext + "\n".join(scr) - self.newtool.help = safertext - self.newtool.version_command = f'echo "{self.args.tool_version}"' - std = gxtp.Stdios() - std1 = gxtp.Stdio() - std.append(std1) - self.newtool.stdios = std - requirements = gxtp.Requirements() - if self.args.packages: - for d in self.args.packages.split(","): - ver = "" - d = d.replace("==", ":") - d = d.replace("=", ":") - if ":" in d: - packg, ver = d.split(":") - else: - packg = d - requirements.append( - gxtp.Requirement("package", packg.strip(), ver.strip()) - ) - self.newtool.requirements = requirements - if self.args.parampass == "0": - self.doNoXMLparam() - else: - self.doXMLparam() - self.newtool.outputs = self.toutputs - self.newtool.inputs = self.tinputs - if self.args.script_path: - configfiles = gxtp.Configfiles() - configfiles.append( - gxtp.Configfile(name="runme", text="\n".join(self.escapedScript)) - ) - self.newtool.configfiles = configfiles - tests = gxtp.Tests() - test_a = gxtp.Test() - for tp in self.testparam: - test_a.append(tp) - tests.append(test_a) - self.newtool.tests = tests - self.newtool.add_comment( - "Created by %s at %s using the Galaxy Tool Factory." - % (self.args.user_email, timenow()) - ) - self.newtool.add_comment("Source in git at: %s" % (toolFactoryURL)) - exml0 = self.newtool.export() - exml = exml0.replace(FAKEEXE, "") # temporary work around until PR accepted - if ( - self.test_override - ): # cannot do this inside galaxyxml as it expects lxml objects for tests - part1 = exml.split("")[0] - part2 = exml.split("")[1] - fixed = "%s\n%s\n%s" % (part1, "\n".join(self.test_override), part2) - exml = fixed - # exml = exml.replace('range="1:"', 'range="1000:"') - xf = open("%s.xml" % self.tool_name, "w") - xf.write(exml) - xf.write("\n") - xf.close() - # ready for the tarball - - def run(self): - """ - generate test outputs by running a command line - won't work if command or test override in play - planemo is the - easiest way to generate test outputs for that case so is - automagically selected - """ - scl = " ".join(self.cl) - err = None - if self.args.parampass != "0": - if os.path.exists(self.elog): - ste = open(self.elog, "a") - else: - ste = open(self.elog, "w") - if self.lastclredirect: - sto = open(self.lastclredirect[1], "wb") # is name of an output file - else: - if os.path.exists(self.tlog): - sto = open(self.tlog, "a") - else: - sto = open(self.tlog, "w") - sto.write( - "## Executing Toolfactory generated command line = %s\n" % scl - ) - sto.flush() - subp = subprocess.run( - self.cl, shell=False, stdout=sto, stderr=ste - ) - sto.close() - ste.close() - retval = subp.returncode - else: # work around special case - stdin and write to stdout - if len(self.infiles) > 0: - sti = open(self.infiles[0]["name"], "rb") - else: - sti = sys.stdin - if len(self.outfiles) > 0: - sto = open(self.outfiles[0]["name"], "wb") - else: - sto = sys.stdout - subp = subprocess.run( - self.cl, shell=False, stdout=sto, stdin=sti - ) - sto.write("## Executing Toolfactory generated command line = %s\n" % scl) - retval = subp.returncode - sto.close() - sti.close() - if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0: - os.unlink(self.tlog) - if os.path.isfile(self.elog) and os.stat(self.elog).st_size == 0: - os.unlink(self.elog) - if retval != 0 and err: # problem - 
sys.stderr.write(err) - logging.debug("run done") - return retval - - def shedLoad(self): - """ - use bioblend to create new repository - or update existing - - """ - if os.path.exists(self.tlog): - sto = open(self.tlog, "a") - else: - sto = open(self.tlog, "w") - - ts = toolshed.ToolShedInstance( - url=self.args.toolshed_url, - key=self.args.toolshed_api_key, - verify=False, - ) - repos = ts.repositories.get_repositories() - rnames = [x.get("name", "?") for x in repos] - rids = [x.get("id", "?") for x in repos] - tfcat = "ToolFactory generated tools" - if self.tool_name not in rnames: - tscat = ts.categories.get_categories() - cnames = [x.get("name", "?").strip() for x in tscat] - cids = [x.get("id", "?") for x in tscat] - catID = None - if tfcat.strip() in cnames: - ci = cnames.index(tfcat) - catID = cids[ci] - res = ts.repositories.create_repository( - name=self.args.tool_name, - synopsis="Synopsis:%s" % self.args.tool_desc, - description=self.args.tool_desc, - type="unrestricted", - remote_repository_url=self.args.toolshed_url, - homepage_url=None, - category_ids=catID, - ) - tid = res.get("id", None) - sto.write(f"#create_repository {self.args.tool_name} tid={tid} res={res}\n") - else: - i = rnames.index(self.tool_name) - tid = rids[i] - try: - res = ts.repositories.update_repository( - id=tid, tar_ball_path=self.newtarpath, commit_message=None - ) - sto.write(f"#update res id {id} ={res}\n") - except ConnectionError: - sto.write( - "####### Is the toolshed running and the API key correct? Bioblend shed upload failed\n" - ) - sto.close() - - def eph_galaxy_load(self): - """ - use ephemeris to load the new tool from the local toolshed after planemo uploads it - """ - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - cll = [ - "shed-tools", - "install", - "-g", - self.args.galaxy_url, - "--latest", - "-a", - self.args.galaxy_api_key, - "--name", - self.tool_name, - "--owner", - "fubar", - "--toolshed", - self.args.toolshed_url, - "--section_label", - "ToolFactory", - ] - tout.write("running\n%s\n" % " ".join(cll)) - subp = subprocess.run( - cll, - cwd=self.ourcwd, - shell=False, - stderr=tout, - stdout=tout, - ) - tout.write( - "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) - ) - tout.close() - return subp.returncode - - def writeShedyml(self): - """for planemo""" - yuser = self.args.user_email.split("@")[0] - yfname = os.path.join(self.tooloutdir, ".shed.yml") - yamlf = open(yfname, "w") - odict = { - "name": self.tool_name, - "owner": yuser, - "type": "unrestricted", - "description": self.args.tool_desc, - "synopsis": self.args.tool_desc, - "category": "TF Generated Tools", - } - yaml.dump(odict, yamlf, allow_unicode=True) - yamlf.close() - - def makeTool(self): - """write xmls and input samples into place""" - if self.args.parampass == 0: - self.doNoXMLparam() - else: - self.makeXML() - if self.args.script_path: - stname = os.path.join(self.tooloutdir, self.sfile) - if not os.path.exists(stname): - shutil.copyfile(self.sfile, stname) - xreal = "%s.xml" % self.tool_name - xout = os.path.join(self.tooloutdir, xreal) - shutil.copyfile(xreal, xout) - for p in self.infiles: - pth = p["name"] - dest = os.path.join(self.testdir, "%s_sample" % p["infilename"]) - shutil.copyfile(pth, dest) - dest = os.path.join(self.repdir, "%s_sample" % p["infilename"]) - shutil.copyfile(pth, dest) - - def makeToolTar(self, report_fail=False): - """move outputs into test-data and prepare the tarball""" - excludeme = 
"_planemo_test_report.html" - - def exclude_function(tarinfo): - filename = tarinfo.name - return None if filename.endswith(excludeme) else tarinfo - - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - for p in self.outfiles: - oname = p["name"] - tdest = os.path.join(self.testdir, "%s_sample" % oname) - src = os.path.join(self.testdir, oname) - if not os.path.isfile(tdest): - if os.path.isfile(src): - shutil.copyfile(src, tdest) - dest = os.path.join(self.repdir, "%s.sample" % (oname)) - shutil.copyfile(src, dest) - else: - if report_fail: - tout.write( - "###Tool may have failed - output file %s not found in testdir after planemo run %s." - % (tdest, self.testdir) - ) - tf = tarfile.open(self.newtarpath, "w:gz") - tf.add( - name=self.tooloutdir, - arcname=self.tool_name, - filter=exclude_function, - ) - tf.close() - shutil.copyfile(self.newtarpath, self.args.new_tool) - - def moveRunOutputs(self): - """need to move planemo or run outputs into toolfactory collection""" - with os.scandir(self.tooloutdir) as outs: - for entry in outs: - if not entry.is_file(): - continue - if "." in entry.name: - _, ext = os.path.splitext(entry.name) - if ext in [".tgz", ".json"]: - continue - if ext in [".yml", ".xml", ".yaml"]: - newname = f"{entry.name.replace('.','_')}.txt" - else: - newname = entry.name - else: - newname = f"{entry.name}.txt" - dest = os.path.join(self.repdir, newname) - src = os.path.join(self.tooloutdir, entry.name) - shutil.copyfile(src, dest) - if self.args.include_tests: - with os.scandir(self.testdir) as outs: - for entry in outs: - if (not entry.is_file()) or entry.name.endswith( - "_planemo_test_report.html" - ): - continue - if "." in entry.name: - _, ext = os.path.splitext(entry.name) - if ext in [".tgz", ".json"]: - continue - if ext in [".yml", ".xml", ".yaml"]: - newname = f"{entry.name.replace('.','_')}.txt" - else: - newname = entry.name - else: - newname = f"{entry.name}.txt" - dest = os.path.join(self.repdir, newname) - src = os.path.join(self.testdir, entry.name) - shutil.copyfile(src, dest) - - def planemo_test_once(self): - """planemo is a requirement so is available for testing but needs a - different call if in the biocontainer - see above - and for generating test outputs if command or test overrides are - supplied test outputs are sent to repdir for display - """ - penv = os.environ - phome = penv['HOME'] - isDocker = os.path.exists('/.dockerenv') - if isDocker: # use the volume if it exists - phome = '/home/planemo' - if os.path.exists(phome): # is mounted - home = phome - else: - home = '/tmp/planemo' # this will be brutal but otherwise /home/galaxy - os.mkdir('/tmp/planemo') - penv["HOME"] = phome - path = penv['PATH'] - penv['PATH'] = '%s:%s' % (phome,path) - print(f"#### set home to {phome} with path={penv['PATH']}") - #pconfig = os.path.join(phome,'.planemo.yml') - #penv["PLANEMO_GLOBAL_CONFIG_PATH"] = pconfig - # self.set_planemo_galaxy_root(self.args.galaxy_root, config_path=pconfig) - xreal = "%s.xml" % self.tool_name - tool_test_path = os.path.join( - self.repdir, f"{self.tool_name}_planemo_test_report.html" - ) - if os.path.exists(self.tlog): - tout = open(self.tlog, "a") - else: - tout = open(self.tlog, "w") - cll = [ - "planemo", - "test", - "--galaxy_python_version", - self.args.python_version, - "--test_data", - os.path.abspath(self.testdir), - "--test_output", - os.path.abspath(tool_test_path), - ## "--galaxy_root", - ## self.args.galaxy_root, - "--update_test_data", - os.path.abspath(xreal), - ] 
- p = subprocess.run( - cll, - env = penv, - shell=False, - cwd=self.tooloutdir, - stderr=tout, - stdout=tout, - ) - tout.close() - return p.returncode - - def set_planemo_galaxy_root(self, galaxyroot, config_path): - # planemo tries to write to ~/.planemo - trying to convince it otherwise - CONFIG_TEMPLATE = """## Planemo Global Configuration File. -## Everything in this file is completely optional - these values can all be -## configured via command line options for the corresponding commands. -## Specify a default galaxy_root for test and server commands here. -galaxy_root: %s -## Username used with toolshed(s). -#shed_username: "" -sheds: - # For each tool shed you wish to target, uncomment key or both email and - # password. - toolshed: - #key: "" - #email: "" - #password: "" - testtoolshed: - #key: "" - #email: "" - #password: "" - local: - #key: "" - #email: "" - #password: "" -""" - if not os.path.exists(config_path): - with open(config_path, "w") as f: - f.write(CONFIG_TEMPLATE % galaxyroot) - - -def main(): - """ - This is a Galaxy wrapper. - It expects to be called by a special purpose tool.xml - - """ - parser = argparse.ArgumentParser() - a = parser.add_argument - a("--script_path", default=None) - a("--history_test", default=None) - a("--cl_suffix", default=None) - a("--sysexe", default=None) - a("--packages", default=None) - a("--tool_name", default="newtool") - a("--tool_dir", default=None) - a("--input_files", default=[], action="append") - a("--output_files", default=[], action="append") - a("--user_email", default="Unknown") - a("--bad_user", default=None) - a("--make_Tool", default="runonly") - a("--help_text", default=None) - a("--tool_desc", default=None) - a("--tool_version", default=None) - a("--citations", default=None) - a("--command_override", default=None) - a("--test_override", default=None) - a("--additional_parameters", action="append", default=[]) - a("--selecttext_parameters", action="append", default=[]) - a("--edit_additional_parameters", action="store_true", default=False) - a("--parampass", default="positional") - a("--tfout", default="./tfout") - a("--new_tool", default="new_tool") - a("--galaxy_url", default="http://localhost:8080") - a("--toolshed_url", default="http://localhost:9009") - # make sure this is identical to tool_sheds_conf.xml - # localhost != 127.0.0.1 so validation fails - a("--toolshed_api_key", default="fakekey") - a("--galaxy_api_key", default="fakekey") - a("--galaxy_root", default="/galaxy-central") - a("--galaxy_venv", default="/galaxy_venv") - a("--collection", action="append", default=[]) - a("--include_tests", default=False, action="store_true") - a("--python_version", default="3.9") - args = parser.parse_args() - assert not args.bad_user, ( - 'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy \ -admin adds %s to "admin_users" in the galaxy.yml Galaxy configuration file' - % (args.bad_user, args.bad_user) - ) - assert args.tool_name, "## Tool Factory expects a tool name - eg --tool_name=DESeq" - assert ( - args.sysexe or args.packages - ), "## Tool Factory wrapper expects an interpreter \ -or an executable package in --sysexe or --packages" - r = ScriptRunner(args) - r.writeShedyml() - r.makeTool() - if args.make_Tool == "generate": - r.run() - r.moveRunOutputs() - r.makeToolTar() - else: - r.planemo_test_once() - r.moveRunOutputs() - r.makeToolTar(report_fail=True) - if args.make_Tool == "gentestinstall": - r.shedLoad() - r.eph_galaxy_load() - - -if __name__ == "__main__": - main() diff -r c4f192ec521c -r 
2a46da701dde toolfactory/rgToolFactory2.xml --- a/toolfactory/rgToolFactory2.xml Tue Apr 20 05:30:52 2021 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,612 +0,0 @@ - - Scripts into tools v2.0
- - - galaxyxml - planemo - ephemeris - - - 0: ---cl_suffix "$cl_suffix" - #end if - #if $cover.commover == "yes": - #if len(str($cover.command_override)) > 10: ---command_override "$commandoverride" - #end if - #if len(str($cover.test_override)) > 10: ---test_override "$testoverride" - #end if - #end if ---packages "$deps.packages" - #if $deps.usescript.choosescript == "yes": ---script_path "$runme" ---sysexe "$deps.usescript.scriptrunner" - #end if ---tool_name "$tool_name" --user_email "$__user_email__" --citations "$citeme" --parampass "$io_param.ppass.parampass" - - #if str($make.makeMode.make_Tool)!="runonly": ---make_Tool "$make.makeMode.make_Tool" ---tool_desc "$make.makeMode.tool_desc" ---tool_version "$make.makeMode.tool_version" ---help_text "$helpme" ---new_tool "$new_tool" ---toolshed_api_key "$make.makeMode.toolshed_apikey" ---galaxy_api_key "$make.makeMode.galaxy_apikey" ---toolshed_url "$make.makeMode.toolshed_url" ---galaxy_url "$make.makeMode.galaxy_url" - #end if - #if $io_param.ppass.parampass != '0': - #if str($io_param.ppass.addparam.edit_params) == "yes": ---edit_additional_parameters - #end if - #for $apar in $io_param.ppass.addparam.additional_parameters: - #if $apar.ap_type.param_type=="selecttext": ---selecttext_parameters '{"name":"$apar.param_name", "label":"$apar.param_label", "help":"$apar.param_help", -"type":"$apar.ap_type.param_type","CL":"$apar.param_CL","override":"$apar.param_CLprefixed","value": [ - #for $i,$st in enumerate($apar.ap_type.selectTexts): - "$st.select_value" - #if ($i < (len($apar.ap_type.selectTexts)-1)): - , - #end if - #end for - ], "texts": [ - #for $i,$st in enumerate($apar.ap_type.selectTexts): - "$st.select_text" - #if ($i < (len($apar.ap_type.selectTexts)-1)): - , - #end if - - #end for - ] - }' - #else: ---additional_parameters '{"name": "$apar.param_name", "value": "$apar.ap_type.param_value", "label": "$apar.param_label", "help": "$apar.param_help", -"type": "$apar.ap_type.param_type","CL": "$apar.param_CL","override": "$apar.param_CLprefixed", "repeat": "$apar.param_repeat"}' - #end if - #end for - #end if - #for $intab in $io_param.ppass.io.history_inputs: ---input_files '{"name": "$intab.input_files", "CL": "$intab.input_CL", "format": "$intab.input_formats", "label": "$intab.input_label", "help": "$intab.input_help", "repeat": "$intab.input_repeat"}' - #end for - #for $otab in $io_param.ppass.io.history_outputs: ---output_files '{"name": "$otab.history_name", "format": "$otab.history_format", "CL": "$otab.history_CL", "test": "$otab.history_test"}' - #end for - #for $collect in $io_param.ppass.io.collection_outputs: ---collection '{"name": "$collect.name", "kind": "$collect.kind", "discover": "$collect.discover", "label": "$collect.label"}' - #end for ---galaxy_root "$__root_dir__" ---tool_dir "$__tool_directory__" - #end if -]]> - - -$deps.usescript.dynScript - - -#if $cover.commover == "yes" and len(str($cover.command_override).strip()) > 1: -$cover.command_override -#end if - - -#if $cover.commover == "yes" and len(str($cover.test_override).strip()) > 1: -$cover.test_override -#end if - - - #if $make.makeMode.make_Tool != "runonly": -${make.makeMode.help_text} - #else -$tool_name help goes here - #end if - - -#if $make.makeMode.make_Tool != "runonly": - #for $citation in $make.makeMode.citations: - #if $citation.citation_type.type == "bibtex": - **ENTRY**bibtex - ${citation.citation_type.bibtex} - #else - **ENTRY**doi - ${citation.citation_type.doi} - #end if - #end for -#end if - - - - - - - - - - - -
- - - - makeMode['make_Tool'] != "runonly" - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -.. class:: warningmark - -**Details and attribution** -(see GTF_) - -**Local Admins ONLY** -Only users whose IDs found in the local admin_user configuration setting in universe_wsgi.ini can run this tool. - -**If you find a bug** -Please raise an issue, or even better, submit a pull request fixing it, on the github repository GTF_ - -**What it does** -This tool optionally generates normal workflow compatible first class Galaxy tools - -Generated tools can run existing binary packages that become requirements, existing scripts, or new scripts pasted into this tool form. -Pasted scripts are written so they are part of the new tool and cannot be adjusted by the downstream user. -Binary packages are managed by the dependency subsystem - conda usually, so anything in bioconda or conda_forge is available for example. - -Any number of parameters can be built into the new tool form for passing in to the script or executable at runtime. -These can be editable by the downstream user or baked in. - -When you run this tool, your executable or script and supplied parameter values will be run to produce a canonical -set of outputs - these are used to construct a test for the new tool. - -If tool generation is required, a new tarball compatible with any Galaxy toolshed is created. -It can be unpacked in your galaxy/tools directory and manually added to tool_conf.xml, or -installed into any toolshed from where it can be installed into your Galaxy. - - -.. class:: warningmark - -**Note to system administrators** -This tool offers *NO* built in protection against malicious scripts. It should only be installed on private/personnal Galaxy instances. -Admin_users will have the power to do anything they want as the Galaxy user if you install this tool. - -.. class:: warningmark - -**Use on public servers** is STRONGLY discouraged for obvious reasons - -The tools generated by this tool will run just as securely as any other normal installed Galaxy tool but like any other new tools, should always be checked carefully before installation. -We recommend that you follow the good code hygiene practices associated with safe toolshed practices. - -Here's a sample python script that can be cut and pasted into the tool form, suitable for positional parameter passing: - -:: - - # reverse order of text by row - import sys - inp = sys.argv[1] - outp = sys.argv[2] - i = open(inp,'r').readlines() - o = open(outp,'w') - for row in i: - rs = row.rstrip() - rs = list(rs) - rs.reverse() - o.write(''.join(rs)) - o.write('\n') - o.close() - -With argparse style parameters: - -:: - - # reverse order of text by row - import argparse - parser = argparse.ArgumentParser() - a = parser.add_argument - a('--infile',default='') - a('--outfile',default=None) - args = parser.parse_args() - inp = args.infile - outp = args.outfile - i = open(inp,'r').readlines() - o = open(outp,'w') - for row in i: - rs = row.rstrip() - rs = list(rs) - rs.reverse() - o.write(''.join(rs)) - o.write('\n') - o.close() - -R script to draw some plots - use a collection. 
- -:: - - - \# note this script takes NO input because it generates random data - dir.create('plots') - for (i in 1:10) { - foo = runif(100) - bar = rnorm(100) - bar = foo + 0.05*bar - pdf(paste('plots/yet',i,"anotherplot.pdf",sep='_')) - plot(foo,bar,main=paste("Foo by Bar plot \#",i),col="maroon", pch=3,cex=0.6) - dev.off() - foo = data.frame(a=runif(100),b=runif(100),c=runif(100),d=runif(100),e=runif(100),f=runif(100)) - bar = as.matrix(foo) - pdf(paste('plots/yet',i,"anotherheatmap.pdf",sep='_')) - heatmap(bar,main='Random Heatmap') - dev.off() - } - - - -Paper_ - -*Licensing* - -Copyright Ross Lazarus (ross period lazarus at gmail period com) May 2012 -All rights reserved. -Licensed under the LGPL_ - -.. _LGPL: http://www.gnu.org/copyleft/lesser.html -.. _GTF: https://github.com/fubar2/toolfactory -.. _Paper: https://academic.oup.com/bioinformatics/article/28/23/3139/192853 - - - - - 10.1093/bioinformatics/bts573 - -
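The R collection example above relies on the generated tool discovering everything written into the 'plots' directory as an output collection. A minimal Python sketch of the same pattern (illustrative only; the file names and contents are arbitrary)::

    # write several files into 'plots' so an output collection can discover them
    import os
    import random

    os.makedirs('plots', exist_ok=True)
    for i in range(10):
        with open(os.path.join('plots', 'random_%d.txt' % i), 'w') as handle:
            handle.write('\n'.join(str(random.random()) for _ in range(100)))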
- - diff -r c4f192ec521c -r 2a46da701dde toolfactory/test-data/test1_log.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/test-data/test1_log.txt Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,1 @@ +## Executing Toolfactory generated command line = python /tmp/pyrevposq5dmcdy1.python /tmp/tmpqrksf8sd/files/5/b/9/dataset_5b952a86-87df-44ad-a415-ea549f3f0cee.dat output2 diff -r c4f192ec521c -r 2a46da701dde toolfactory/tfout/.shed.yml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/tfout/.shed.yml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,6 @@ +category: TF Generated Tools +description: Makes random plots +name: plotter +owner: planemo +synopsis: Makes random plots +type: unrestricted diff -r c4f192ec521c -r 2a46da701dde toolfactory/tfout/plotter.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/toolfactory/tfout/plotter.xml Mon Apr 26 05:25:26 2021 +0000 @@ -0,0 +1,94 @@ + + + + Makes random plots + + r-base + + + + + + + + + + + + + + + + + + + + + + + + + + + 10.1093/bioinformatics/bts573 + + +
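For reference, a minimal sketch of the tarball packaging used by the makeToolTar methods above (illustrative only; the function name and paths are placeholders): the tool output directory is added under the tool's name and planemo HTML reports are filtered out::

    import tarfile

    def make_toolshed_tarball(tool_dir, tool_name, out_path):
        # returning None from the filter drops planemo HTML reports from the archive
        def exclude_reports(tarinfo):
            return None if tarinfo.name.endswith("_planemo_test_report.html") else tarinfo

        with tarfile.open(out_path, "w:gz") as tar:
            tar.add(name=tool_dir, arcname=tool_name, filter=exclude_reports)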