comparison toolfactory/rgToolFactory2.py @ 106:6de08f44e551 draft
Uploaded
| field | value |
|---|---|
| author | fubar |
| date | Sat, 28 Nov 2020 00:50:29 +0000 |
| parents | 373e1f9cb609 |
| children | 18e616939a91 |
| 105:373e1f9cb609 | 106:6de08f44e551 |
|---|---|
| 555 scrp = self.script.split("\n") | 555 scrp = self.script.split("\n") |
| 556 scrpt = [" %s" % x for x in scrp] # try to stop templating | 556 scrpt = [" %s" % x for x in scrp] # try to stop templating |
| 557 scrpt.insert(0, "```\n") | 557 scrpt.insert(0, "```\n") |
| 558 if len(scrpt) > 300: | 558 if len(scrpt) > 300: |
| 559 safertext = ( | 559 safertext = ( |
| 560 safertext + scrpt[:100] + \ | 560 safertext |
| 561 [">500 lines - stuff deleted", "......"] + scrpt[-100:] | 561 + scrpt[:100] |
| 562 + [">500 lines - stuff deleted", "......"] | |
| 563 + scrpt[-100:] | |
| 562 ) | 564 ) |
| 563 else: | 565 else: |
| 564 safertext = safertext + scrpt | 566 safertext = safertext + scrpt |
| 565 safertext.append("\n```") | 567 safertext.append("\n```") |
| 566 self.newtool.help = "\n".join([x for x in safertext]) | 568 self.newtool.help = "\n".join([x for x in safertext]) |
| 647 sto = open(self.tlog, "w") | 649 sto = open(self.tlog, "w") |
| 648 sto.write( | 650 sto.write( |
| 649 "## Executing Toolfactory generated command line = %s\n" % scl | 651 "## Executing Toolfactory generated command line = %s\n" % scl |
| 650 ) | 652 ) |
| 651 sto.flush() | 653 sto.flush() |
| 652 subp = subprocess.run(self.cl, env=self.ourenv, shell=False, stdout=sto, stderr=ste) | 654 subp = subprocess.run( |
| | 655 self.cl, env=self.ourenv, shell=False, stdout=sto, stderr=ste |
| | 656 ) |
| 653 sto.close() | 657 sto.close() |
| 654 ste.close() | 658 ste.close() |
| 655 retval = subp.returncode | 659 retval = subp.returncode |
| 656 else: # work around special case - stdin and write to stdout | 660 else: # work around special case - stdin and write to stdout |
| 657 if len(self.infiles) > 0: | 661 if len(self.infiles) > 0: |
| 660 sti = sys.stdin | 664 sti = sys.stdin |
| 661 if len(self.outfiles) > 0: | 665 if len(self.outfiles) > 0: |
| 662 sto = open(self.outfiles[0][ONAMEPOS], "wb") | 666 sto = open(self.outfiles[0][ONAMEPOS], "wb") |
| 663 else: | 667 else: |
| 664 sto = sys.stdout | 668 sto = sys.stdout |
| 665 subp = subprocess.run(self.cl, env=self.ourenv, shell=False, stdout=sto, stdin=sti) | 669 subp = subprocess.run( |
| | 670 self.cl, env=self.ourenv, shell=False, stdout=sto, stdin=sti |
| | 671 ) |
| 666 sto.write("## Executing Toolfactory generated command line = %s\n" % scl) | 672 sto.write("## Executing Toolfactory generated command line = %s\n" % scl) |
| 667 retval = subp.returncode | 673 retval = subp.returncode |
| 668 sto.close() | 674 sto.close() |
| 669 sti.close() | 675 sti.close() |
| 670 if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0: | 676 if os.path.isfile(self.tlog) and os.stat(self.tlog).st_size == 0: |
| 674 if retval != 0 and err: # problem | 680 if retval != 0 and err: # problem |
| 675 sys.stderr.write(err) | 681 sys.stderr.write(err) |
| 676 logging.debug("run done") | 682 logging.debug("run done") |
| 677 return retval | 683 return retval |
| 678 | 684 |
| 679 | |
| 680 def copy_to_container(self, src, dest, container): | 685 def copy_to_container(self, src, dest, container): |
| 681 """ Recreate the src directory tree at dest - full path included | 686 """Recreate the src directory tree at dest - full path included""" |
| 682 """ | |
| 683 idir = os.getcwd() | 687 idir = os.getcwd() |
| 684 workdir = os.path.dirname(src) | 688 workdir = os.path.dirname(src) |
| 685 os.chdir(workdir) | 689 os.chdir(workdir) |
| 686 _, tfname = tempfile.mkstemp(suffix=".tar") | 690 _, tfname = tempfile.mkstemp(suffix=".tar") |
| 687 tar = tarfile.open(tfname, mode='w') | 691 tar = tarfile.open(tfname, mode="w") |
| 688 srcb = os.path.basename(src) | 692 srcb = os.path.basename(src) |
| 689 tar.add(srcb) | 693 tar.add(srcb) |
| 690 tar.close() | 694 tar.close() |
| 691 data = open(tfname, 'rb').read() | 695 data = open(tfname, "rb").read() |
| 692 container.put_archive(dest, data) | 696 container.put_archive(dest, data) |
| 693 os.unlink(tfname) | 697 os.unlink(tfname) |
| 694 os.chdir(idir) | 698 os.chdir(idir) |
| 695 | 699 |
| 696 | |
| 697 def copy_from_container(self, src, dest, container): | 700 def copy_from_container(self, src, dest, container): |
| 698 """ recreate the src directory tree at dest using docker sdk | 701 """recreate the src directory tree at dest using docker sdk""" |
| 699 """ | 702 os.makedirs(dest, exist_ok=True) |
| 700 os.makedirs(dest,exist_ok=True) | |
| 701 _, tfname = tempfile.mkstemp(suffix=".tar") | 703 _, tfname = tempfile.mkstemp(suffix=".tar") |
| 702 tf = open(tfname,'wb') | 704 tf = open(tfname, "wb") |
| 703 bits, stat = container.get_archive(src) | 705 bits, stat = container.get_archive(src) |
| 704 for chunk in bits: | 706 for chunk in bits: |
| 705 tf.write(chunk) | 707 tf.write(chunk) |
| 706 tf.close() | 708 tf.close() |
| 707 tar = tarfile.open(tfname,'r') | 709 tar = tarfile.open(tfname, "r") |
| 708 tar.extractall(dest) | 710 tar.extractall(dest) |
| 709 tar.close() | 711 tar.close() |
| 710 os.unlink(tfname) | 712 os.unlink(tfname) |
| 711 | 713 |
| 712 | |
| 713 | |
| 714 | |
| 715 def planemo_biodocker_test(self): | 714 def planemo_biodocker_test(self): |
| 716 """planemo currently leaks dependencies if used in the same container and gets unhappy after a | 715 """planemo currently leaks dependencies if used in the same container and gets unhappy after a |
| 717 first successful run. https://github.com/galaxyproject/planemo/issues/1078#issuecomment-731476930 | 716 first successful run. https://github.com/galaxyproject/planemo/issues/1078#issuecomment-731476930 |
| 718 | 717 |
| 719 Docker biocontainer has planemo with caches filled to save repeated downloads | 718 Docker biocontainer has planemo with caches filled to save repeated downloads |
| 720 | 719 |
| 721 | 720 |
| 722 """ | 721 """ |
| 723 def prun(container,tout,cl,user="biodocker"): | 722 |
| 724 rlog = container.exec_run(cl,user=user) | 723 def prun(container, tout, cl, user="biodocker"): |
| 725 slogl = str(rlog).split('\\n') | 724 rlog = container.exec_run(cl, user=user) |
| 726 slog = '\n'.join(slogl) | 725 slogl = str(rlog).split("\\n") |
| | 726 slog = "\n".join(slogl) |
| 727 tout.write(f"## got rlog {slog} from {cl}\n") | 727 tout.write(f"## got rlog {slog} from {cl}\n") |
| 728 | 728 |
| 729 dgroup = grp.getgrnam('docker')[2] | 729 dgroup = grp.getgrnam("docker")[2] |
| 730 if os.path.exists(self.tlog): | 730 if os.path.exists(self.tlog): |
| 731 tout = open(self.tlog, "a") | 731 tout = open(self.tlog, "a") |
| 732 else: | 732 else: |
| 733 tout = open(self.tlog, "w") | 733 tout = open(self.tlog, "w") |
| 734 planemoimage = "quay.io/fubar2/planemo-biocontainer" | 734 planemoimage = "quay.io/fubar2/planemo-biocontainer" |
| 735 xreal = "%s.xml" % self.tool_name | 735 xreal = "%s.xml" % self.tool_name |
| 736 repname = f"{self.tool_name}_planemo_test_report.html" | 736 repname = f"{self.tool_name}_planemo_test_report.html" |
| 737 ptestrep_path = os.path.join(self.repdir,repname) | 737 ptestrep_path = os.path.join(self.repdir, repname) |
| 738 tool_name = self.tool_name | 738 tool_name = self.tool_name |
| 739 client = docker.from_env() | 739 client = docker.from_env() |
| 740 tvol = client.volumes.create() | 740 tvol = client.volumes.create() |
| 741 tvolname = tvol.name | 741 tvolname = tvol.name |
| 742 destdir = "/toolfactory/ptest" | 742 destdir = "/toolfactory/ptest" |
| 743 imrep = os.path.join(destdir,repname) | 743 imrep = os.path.join(destdir, repname) |
| 744 # need to keep the container running so sleep a while - we stop and destroy it when we are done | 744 # need to keep the container running so sleep a while - we stop and destroy it when we are done |
| 745 container = client.containers.run(planemoimage,'sleep 30m', detach=True, user="biodocker", | 745 container = client.containers.run( |
| 746 network="host", volumes={f"{tvolname}": {'bind': '/toolfactory', 'mode': 'rw'}}) | 746 planemoimage, |
| 747 "sleep 30m", | |
| 748 detach=True, | |
| 749 user="biodocker", | |
| 750 volumes={f"{tvolname}": {"bind": "/toolfactory", "mode": "rw"}}, | |
| 751 ) | |
| 747 cl = f"groupmod -g {dgroup} docker" | 752 cl = f"groupmod -g {dgroup} docker" |
| 748 prun(container, tout, cl, user="root") | 753 prun(container, tout, cl, user="root") |
| 749 cl = f"mkdir -p {destdir}" | 754 cl = f"mkdir -p {destdir}" |
| 750 prun(container, tout, cl, user="root") | 755 prun(container, tout, cl, user="root") |
| 751 cl = f"rm -rf {destdir}/*" | 756 cl = f"rm -rf {destdir}/*" |
| 752 prun(container, tout, cl, user="root") | 757 prun(container, tout, cl, user="root") |
| 753 ptestpath = os.path.join(destdir,'tfout',xreal) | 758 ptestpath = os.path.join(destdir, "tfout", xreal) |
| 754 self.copy_to_container(self.tooloutdir,destdir,container) | 759 self.copy_to_container(self.tooloutdir, destdir, container) |
| 755 cl ='chmod -R a+rwx /toolfactory' | 760 cl = "chmod -R a+rwx /toolfactory" |
| 756 prun(container, tout, cl, user="root") | 761 prun(container, tout, cl, user="root") |
| 757 rlog = container.exec_run(f"ls -la {destdir}") | 762 rlog = container.exec_run(f"ls -la {destdir}") |
| 758 ptestcl = f"planemo test --update_test_data --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" | 763 ptestcl = f"planemo test --update_test_data --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" |
| 759 try: | 764 try: |
| 760 rlog = container.exec_run(ptestcl) | 765 rlog = container.exec_run(ptestcl) |
| 762 e = sys.exc_info()[0] | 767 e = sys.exc_info()[0] |
| 763 tout.write(f"#### error: {e} from {ptestcl}\n") | 768 tout.write(f"#### error: {e} from {ptestcl}\n") |
| 764 # fails - used to generate test outputs | 769 # fails - used to generate test outputs |
| 765 cl = f"planemo test --test_output {imrep} --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" | 770 cl = f"planemo test --test_output {imrep} --no_cleanup --test_data {destdir}/tfout/test-data --galaxy_root /home/biodocker/galaxy-central {ptestpath}" |
| 766 try: | 771 try: |
| 767 prun(container,tout,cl) | 772 prun(container, tout, cl) |
| 768 except: | 773 except: |
| 769 pass | 774 pass |
| 770 testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp",dir=".") | 775 testouts = tempfile.mkdtemp(suffix=None, prefix="tftemp", dir=".") |
| 771 self.copy_from_container(destdir,testouts,container) | 776 self.copy_from_container(destdir, testouts, container) |
| 772 src = os.path.join(testouts,'ptest') | 777 src = os.path.join(testouts, "ptest") |
| 773 if os.path.isdir(src): | 778 if os.path.isdir(src): |
| 774 shutil.copytree(src, '.', dirs_exist_ok=True) | 779 shutil.copytree(src, ".", dirs_exist_ok=True) |
| 775 src = repname | 780 src = repname |
| 776 if os.path.isfile(repname): | 781 if os.path.isfile(repname): |
| 777 shutil.copyfile(src,ptestrep_path) | 782 shutil.copyfile(src, ptestrep_path) |
| 778 else: | 783 else: |
| 779 tout.write(f"No output from run to shutil.copytree in {src}\n") | 784 tout.write(f"No output from run to shutil.copytree in {src}\n") |
| 780 tout.close() | 785 tout.close() |
| 781 container.stop() | 786 container.stop() |
| 782 container.remove() | 787 container.remove() |
| 783 tvol.remove() | 788 tvol.remove() |
| 784 #shutil.rmtree(testouts) | 789 # shutil.rmtree(testouts) |
| 785 | 790 |
| 786 def shedLoad(self): | 791 def shedLoad(self): |
| 787 """ | 792 """ |
| 788 {'deleted': False, | 793 {'deleted': False, |
| 789 'description': 'Tools for manipulating data', | 794 'description': 'Tools for manipulating data', |
| 830 else: | 835 else: |
| 831 i = rnames.index(self.tool_name) | 836 i = rnames.index(self.tool_name) |
| 832 tid = rids[i] | 837 tid = rids[i] |
| 833 try: | 838 try: |
| 834 res = ts.repositories.update_repository( | 839 res = ts.repositories.update_repository( |
| 835 id=tid, tar_ball_path=self.newtarpath, commit_message=None) | 840 id=tid, tar_ball_path=self.newtarpath, commit_message=None |
| | 841 ) |
| 836 sto.write(f"#####update res={res}\n") | 842 sto.write(f"#####update res={res}\n") |
| 837 except ConnectionError: | 843 except ConnectionError: |
| 838 sto.write("Probably no change to repository - bioblend shed upload failed\n") | 844 sto.write( |
| 845 "Probably no change to repository - bioblend shed upload failed\n" | |
| 846 ) | |
| 839 sto.close() | 847 sto.close() |
| 840 | 848 |
| 841 def eph_galaxy_load(self): | 849 def eph_galaxy_load(self): |
| 842 """load the new tool from the local toolshed after planemo uploads it""" | 850 """load the new tool from the local toolshed after planemo uploads it""" |
| 843 if os.path.exists(self.tlog): | 851 if os.path.exists(self.tlog): |
| 860 self.args.toolshed_url, | 868 self.args.toolshed_url, |
| 861 "--section_label", | 869 "--section_label", |
| 862 "ToolFactory", | 870 "ToolFactory", |
| 863 ] | 871 ] |
| 864 tout.write("running\n%s\n" % " ".join(cll)) | 872 tout.write("running\n%s\n" % " ".join(cll)) |
| 865 subp = subprocess.run(cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stderr=tout, stdout=tout) | 873 subp = subprocess.run( |
| | 874 cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stderr=tout, stdout=tout |
| | 875 ) |
| 866 tout.write( | 876 tout.write( |
| 867 "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) | 877 "installed %s - got retcode %d\n" % (self.tool_name, subp.returncode) |
| 868 ) | 878 ) |
| 869 tout.close() | 879 tout.close() |
| 870 return subp.returncode | 880 return subp.returncode |
| 891 url=self.args.toolshed_url, key=self.args.toolshed_api_key, verify=False | 901 url=self.args.toolshed_url, key=self.args.toolshed_api_key, verify=False |
| 892 ) | 902 ) |
| 893 repos = ts.repositories.get_repositories() | 903 repos = ts.repositories.get_repositories() |
| 894 rnames = [x.get("name", "?") for x in repos] | 904 rnames = [x.get("name", "?") for x in repos] |
| 895 rids = [x.get("id", "?") for x in repos] | 905 rids = [x.get("id", "?") for x in repos] |
| 896 #cat = "ToolFactory generated tools" | 906 # cat = "ToolFactory generated tools" |
| 897 if self.tool_name not in rnames: | 907 if self.tool_name not in rnames: |
| 898 cll = [ | 908 cll = [ |
| 899 "planemo", | 909 "planemo", |
| 900 "shed_create", | 910 "shed_create", |
| 901 "--shed_target", | 911 "--shed_target", |
| 907 "--shed_key", | 917 "--shed_key", |
| 908 self.args.toolshed_api_key, | 918 self.args.toolshed_api_key, |
| 909 ] | 919 ] |
| 910 try: | 920 try: |
| 911 subp = subprocess.run( | 921 subp = subprocess.run( |
| 912 cll, env=self.ourenv, shell=False, cwd=self.tooloutdir, stdout=tout, stderr=tout | 922 cll, |
| | 923 env=self.ourenv, |
| | 924 shell=False, |
| | 925 cwd=self.tooloutdir, |
| | 926 stdout=tout, |
| | 927 stderr=tout, |
| 913 ) | 928 ) |
| 914 except: | 929 except: |
| 915 pass | 930 pass |
| 916 if subp.returncode != 0: | 931 if subp.returncode != 0: |
| 917 tout.write("Repository %s exists\n" % self.tool_name) | 932 tout.write("Repository %s exists\n" % self.tool_name) |
| 929 "--shed_key", | 944 "--shed_key", |
| 930 self.args.toolshed_api_key, | 945 self.args.toolshed_api_key, |
| 931 "--tar", | 946 "--tar", |
| 932 self.newtarpath, | 947 self.newtarpath, |
| 933 ] | 948 ] |
| 934 subp = subprocess.run(cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stdout=tout, stderr=tout) | 949 subp = subprocess.run( |
| 935 tout.write("Ran %s got %d\n" % (" ".join(cll),subp.returncode)) | 950 cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stdout=tout, stderr=tout |
| | 951 ) |
| | 952 tout.write("Ran %s got %d\n" % (" ".join(cll), subp.returncode)) |
| 936 tout.close() | 953 tout.close() |
| 937 return subp.returncode | 954 return subp.returncode |
| 938 | 955 |
| 939 def eph_test(self, genoutputs=True): | 956 def eph_test(self, genoutputs=True): |
| 940 """problem getting jobid - ephemeris upload is the job before the one we want - but depends on how many inputs | 957 """problem getting jobid - ephemeris upload is the job before the one we want - but depends on how many inputs""" |
| 941 """ | |
| 942 if os.path.exists(self.tlog): | 958 if os.path.exists(self.tlog): |
| 943 tout = open(self.tlog, "a") | 959 tout = open(self.tlog, "a") |
| 944 else: | 960 else: |
| 945 tout = open(self.tlog, "w") | 961 tout = open(self.tlog, "w") |
| 946 cll = [ | 962 cll = [ |
| 956 "fubar", | 972 "fubar", |
| 957 ] | 973 ] |
| 958 if genoutputs: | 974 if genoutputs: |
| 959 dummy, tfile = tempfile.mkstemp() | 975 dummy, tfile = tempfile.mkstemp() |
| 960 subp = subprocess.run( | 976 subp = subprocess.run( |
| 961 cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stderr=dummy, stdout=dummy | 977 cll, |
| 962 ) | 978 env=self.ourenv, |
| 963 | 979 cwd=self.ourcwd, |
| 964 with open('tool_test_output.json','rb') as f: | 980 shell=False, |
| | 981 stderr=dummy, |
| | 982 stdout=dummy, |
| | 983 ) |
| | 984 |
| | 985 with open("tool_test_output.json", "rb") as f: |
| 965 s = json.loads(f.read()) | 986 s = json.loads(f.read()) |
| 966 print('read %s' % s) | 987 print("read %s" % s) |
| 967 cl = s['tests'][0]['data']['job']['command_line'].split() | 988 cl = s["tests"][0]["data"]["job"]["command_line"].split() |
| 968 n = cl.index('--script_path') | 989 n = cl.index("--script_path") |
| 969 jobdir = cl[n+1] | 990 jobdir = cl[n + 1] |
| 970 jobdir = jobdir.replace('"','') | 991 jobdir = jobdir.replace('"', "") |
| 971 jobdir = jobdir.split('/configs')[0] | 992 jobdir = jobdir.split("/configs")[0] |
| 972 print('jobdir=%s' % jobdir) | 993 print("jobdir=%s" % jobdir) |
| 973 | 994 |
| 974 #"/home/ross/galthrow/database/jobs_directory/000/649/configs/tmptfxu51gs\" | 995 # "/home/ross/galthrow/database/jobs_directory/000/649/configs/tmptfxu51gs\" |
| 975 src = os.path.join(jobdir,'working',self.newtarpath) | 996 src = os.path.join(jobdir, "working", self.newtarpath) |
| 976 if os.path.exists(src): | 997 if os.path.exists(src): |
| 977 dest = os.path.join(self.testdir, self.newtarpath) | 998 dest = os.path.join(self.testdir, self.newtarpath) |
| 978 shutil.copyfile(src, dest) | 999 shutil.copyfile(src, dest) |
| 979 else: | 1000 else: |
| 980 tout.write('No toolshed archive found after first ephemeris test - not a good sign') | 1001 tout.write( |
| 981 ephouts = os.path.join(jobdir,'working','tfout','test-data') | 1002 "No toolshed archive found after first ephemeris test - not a good sign" |
| | 1003 ) |
| | 1004 ephouts = os.path.join(jobdir, "working", "tfout", "test-data") |
| 982 with os.scandir(ephouts) as outs: | 1005 with os.scandir(ephouts) as outs: |
| 983 for entry in outs: | 1006 for entry in outs: |
| 984 if not entry.is_file(): | 1007 if not entry.is_file(): |
| 985 continue | 1008 continue |
| 986 dest = os.path.join(self.tooloutdir, entry.name) | 1009 dest = os.path.join(self.tooloutdir, entry.name) |
| 987 src = os.path.join(ephouts, entry.name) | 1010 src = os.path.join(ephouts, entry.name) |
| 988 shutil.copyfile(src, dest) | 1011 shutil.copyfile(src, dest) |
| 989 else: | 1012 else: |
| 990 subp = subprocess.run( | 1013 subp = subprocess.run( |
| 991 cll, env=self.ourenv, cwd=self.ourcwd, shell=False, stderr=tout, stdout=tout) | 1014 cll, |
| | 1015 env=self.ourenv, |
| | 1016 cwd=self.ourcwd, |
| | 1017 shell=False, |
| | 1018 stderr=tout, |
| | 1019 stdout=tout, |
| | 1020 ) |
| 992 tout.write("eph_test Ran %s got %d" % (" ".join(cll), subp.returncode)) | 1021 tout.write("eph_test Ran %s got %d" % (" ".join(cll), subp.returncode)) |
| 993 tout.close() | 1022 tout.close() |
| 994 return subp.returncode | 1023 return subp.returncode |
| 995 | 1024 |
| 996 def planemo_test_biocontainer(self, genoutputs=True): | 1025 def planemo_test_biocontainer(self, genoutputs=True): |
| 997 """planemo is a requirement so is available for testing but testing in a biocontainer | 1026 """planemo is a requirement so is available for testing but testing in a biocontainer |
| 998 requires some fiddling to use the hacked galaxy-central .venv | 1027 requires some fiddling to use the hacked galaxy-central .venv |
| 999 | 1028 |
| 1000 Planemo runs: | 1029 Planemo runs: |
| 1001 python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file | 1030 python ./scripts/functional_tests.py -v --with-nosehtml --html-report-file |
| 1002 /export/galaxy-central/database/job_working_directory/000/17/working/TF_run_report_tempdir/tacrev_planemo_test_report.html | 1031 /export/galaxy-central/database/job_working_directory/000/17/working/TF_run_report_tempdir/tacrev_planemo_test_report.html |
| 1003 --with-xunit --xunit-file /tmp/tmpt90p7f9h/xunit.xml --with-structureddata | 1032 --with-xunit --xunit-file /tmp/tmpt90p7f9h/xunit.xml --with-structureddata |
| 1004 --structured-data-file | 1033 --structured-data-file |
| 1005 /export/galaxy-central/database/job_working_directory/000/17/working/tfout/tool_test_output.json functional.test_toolbox | 1034 /export/galaxy-central/database/job_working_directory/000/17/working/tfout/tool_test_output.json functional.test_toolbox |
| 1006 | 1035 |
| 1007 | 1036 |
| 1008 for the planemo-biocontainer, | 1037 for the planemo-biocontainer, |
| 1009 planemo test --conda_dependency_resolution --skip_venv --galaxy_root /galthrow/ rgToolFactory2.xml | 1038 planemo test --conda_dependency_resolution --skip_venv --galaxy_root /galthrow/ rgToolFactory2.xml |
| 1010 | 1039 |
| 1011 """ | 1040 """ |
| 1012 xreal = "%s.xml" % self.tool_name | 1041 xreal = "%s.xml" % self.tool_name |
| 1013 tool_test_path = os.path.join(self.repdir,f"{self.tool_name}_planemo_test_report.html") | 1042 tool_test_path = os.path.join( |
| | 1043 self.repdir, f"{self.tool_name}_planemo_test_report.html" |
| | 1044 ) |
| 1014 if os.path.exists(self.tlog): | 1045 if os.path.exists(self.tlog): |
| 1015 tout = open(self.tlog, "a") | 1046 tout = open(self.tlog, "a") |
| 1016 else: | 1047 else: |
| 1017 tout = open(self.tlog, "w") | 1048 tout = open(self.tlog, "w") |
| 1018 if genoutputs: | 1049 if genoutputs: |
| 1019 dummy, tfile = tempfile.mkstemp() | 1050 dummy, tfile = tempfile.mkstemp() |
| 1020 cll = [ | 1051 cll = [ |
| 1021 ".", os.path.join(self.args.galaxy_root,'.venv','bin','activate'),"&&", | 1052 ".", |
| | 1053 os.path.join(self.args.galaxy_root, ".venv", "bin", "activate"), |
| | 1054 "&&", |
| 1022 "planemo", | 1055 "planemo", |
| 1023 "test", | 1056 "test", |
| 1024 "--test_data", self.testdir, | 1057 "--test_data", |
| 1025 "--test_output", tool_test_path, | 1058 self.testdir, |
| 1059 "--test_output", | |
| 1060 tool_test_path, | |
| 1026 "--skip_venv", | 1061 "--skip_venv", |
| 1027 "--galaxy_root", | 1062 "--galaxy_root", |
| 1028 self.args.galaxy_root, | 1063 self.args.galaxy_root, |
| 1029 "--update_test_data", | 1064 "--update_test_data", |
| 1030 xreal, | 1065 xreal, |
| 1038 stdout=dummy, | 1073 stdout=dummy, |
| 1039 ) | 1074 ) |
| 1040 | 1075 |
| 1041 else: | 1076 else: |
| 1042 cll = [ | 1077 cll = [ |
| 1043 ".", os.path.join(self.args.galaxy_root,'.venv','bin','activate'),"&&", | 1078 ".", |
| | 1079 os.path.join(self.args.galaxy_root, ".venv", "bin", "activate"), |
| | 1080 "&&", |
| 1044 "planemo", | 1081 "planemo", |
| 1045 "test", | 1082 "test", |
| 1046 "--test_data", os.path.self.testdir, | 1083 "--test_data", |
| 1047 "--test_output", os.path.tool_test_path, | 1084 os.path.self.testdir, |
| 1085 "--test_output", | |
| 1086 os.path.tool_test_path, | |
| 1048 "--skip_venv", | 1087 "--skip_venv", |
| 1049 "--galaxy_root", | 1088 "--galaxy_root", |
| 1050 self.args.galaxy_root, | 1089 self.args.galaxy_root, |
| 1051 xreal, | 1090 xreal, |
| 1052 ] | 1091 ] |
| 1053 subp = subprocess.run( | 1092 subp = subprocess.run( |
| 1054 cll, env=self.ourenv, shell=False, cwd=self.tooloutdir, stderr=tout, stdout=tout | 1093 cll, |
| | 1094 env=self.ourenv, |
| | 1095 shell=False, |
| | 1096 cwd=self.tooloutdir, |
| | 1097 stderr=tout, |
| | 1098 stdout=tout, |
| 1055 ) | 1099 ) |
| 1056 tout.close() | 1100 tout.close() |
| 1057 return subp.returncode | 1101 return subp.returncode |
| 1058 | |
| 1059 | 1102 |
| 1060 def writeShedyml(self): | 1103 def writeShedyml(self): |
| 1061 """for planemo""" | 1104 """for planemo""" |
| 1062 yuser = self.args.user_email.split("@")[0] | 1105 yuser = self.args.user_email.split("@")[0] |
| 1063 yfname = os.path.join(self.tooloutdir, ".shed.yml") | 1106 yfname = os.path.join(self.tooloutdir, ".shed.yml") |
| 1092 """move outputs into test-data and prepare the tarball""" | 1135 """move outputs into test-data and prepare the tarball""" |
| 1093 excludeme = "_planemo_test_report.html" | 1136 excludeme = "_planemo_test_report.html" |
| 1094 | 1137 |
| 1095 def exclude_function(tarinfo): | 1138 def exclude_function(tarinfo): |
| 1096 filename = tarinfo.name | 1139 filename = tarinfo.name |
| 1097 return ( | 1140 return None if filename.endswith(excludeme) else tarinfo |
| 1098 None | |
| 1099 if filename.endswith(excludeme) | |
| 1100 else tarinfo | |
| 1101 ) | |
| 1102 | 1141 |
| 1103 for p in self.outfiles: | 1142 for p in self.outfiles: |
| 1104 oname = p[ONAMEPOS] | 1143 oname = p[ONAMEPOS] |
| 1105 tdest = os.path.join(self.testdir, "%s_sample" % oname) | 1144 tdest = os.path.join(self.testdir, "%s_sample" % oname) |
| 1106 if not os.path.isfile(tdest): | 1145 if not os.path.isfile(tdest): |
| 1107 src = os.path.join(self.testdir,oname) | 1146 src = os.path.join(self.testdir, oname) |
| 1108 if os.path.isfile(src): | 1147 if os.path.isfile(src): |
| 1109 shutil.copyfile(src, tdest) | 1148 shutil.copyfile(src, tdest) |
| 1110 dest = os.path.join(self.repdir, "%s.sample" % (oname)) | 1149 dest = os.path.join(self.repdir, "%s.sample" % (oname)) |
| 1111 shutil.copyfile(src, dest) | 1150 shutil.copyfile(src, dest) |
| 1112 else: | 1151 else: |
| 1125 for entry in outs: | 1164 for entry in outs: |
| 1126 if not entry.is_file(): | 1165 if not entry.is_file(): |
| 1127 continue | 1166 continue |
| 1128 if "." in entry.name: | 1167 if "." in entry.name: |
| 1129 nayme, ext = os.path.splitext(entry.name) | 1168 nayme, ext = os.path.splitext(entry.name) |
| 1130 if ext in ['.yml','.xml','.json','.yaml']: | 1169 if ext in [".yml", ".xml", ".json", ".yaml"]: |
| 1131 ext = f'{ext}.txt' | 1170 ext = f"{ext}.txt" |
| 1132 else: | 1171 else: |
| 1133 ext = ".txt" | 1172 ext = ".txt" |
| 1134 ofn = "%s%s" % (entry.name.replace(".", "_"), ext) | 1173 ofn = "%s%s" % (entry.name.replace(".", "_"), ext) |
| 1135 dest = os.path.join(self.repdir, ofn) | 1174 dest = os.path.join(self.repdir, ofn) |
| 1136 src = os.path.join(self.tooloutdir, entry.name) | 1175 src = os.path.join(self.tooloutdir, entry.name) |
| 1137 shutil.copyfile(src, dest) | 1176 shutil.copyfile(src, dest) |
| 1138 with os.scandir(self.testdir) as outs: | 1177 with os.scandir(self.testdir) as outs: |
| 1139 for entry in outs: | 1178 for entry in outs: |
| 1140 if (not entry.is_file()) or entry.name.endswith('_sample') or entry.name.endswith("_planemo_test_report.html"): | 1179 if ( |
| | 1180 (not entry.is_file()) |
| | 1181 or entry.name.endswith("_sample") |
| | 1182 or entry.name.endswith("_planemo_test_report.html") |
| | 1183 ): |
| 1141 continue | 1184 continue |
| 1142 if "." in entry.name: | 1185 if "." in entry.name: |
| 1143 nayme, ext = os.path.splitext(entry.name) | 1186 nayme, ext = os.path.splitext(entry.name) |
| 1144 else: | 1187 else: |
| 1145 ext = ".txt" | 1188 ext = ".txt" |
| 1146 newname = f"{entry.name}{ext}" | 1189 newname = f"{entry.name}{ext}" |
| 1147 dest = os.path.join(self.repdir, newname) | 1190 dest = os.path.join(self.repdir, newname) |
| 1148 src = os.path.join(self.testdir, entry.name) | 1191 src = os.path.join(self.testdir, entry.name) |
| 1149 shutil.copyfile(src, dest) | 1192 shutil.copyfile(src, dest) |
| 1150 | |
| 1151 | 1193 |
| 1152 | 1194 |
| 1153 def main(): | 1195 def main(): |
| 1154 """ | 1196 """ |
| 1155 This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml as: | 1197 This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml as: |
| 1181 a("--edit_additional_parameters", action="store_true", default=False) | 1223 a("--edit_additional_parameters", action="store_true", default=False) |
| 1182 a("--parampass", default="positional") | 1224 a("--parampass", default="positional") |
| 1183 a("--tfout", default="./tfout") | 1225 a("--tfout", default="./tfout") |
| 1184 a("--new_tool", default="new_tool") | 1226 a("--new_tool", default="new_tool") |
| 1185 a("--galaxy_url", default="http://localhost:8080") | 1227 a("--galaxy_url", default="http://localhost:8080") |
| 1186 a( | 1228 a("--toolshed_url", default="http://localhost:9009") |
| 1187 "--toolshed_url", default="http://localhost:9009") | |
| 1188 # make sure this is identical to tool_sheds_conf.xml localhost != 127.0.0.1 so validation fails | 1229 # make sure this is identical to tool_sheds_conf.xml localhost != 127.0.0.1 so validation fails |
| 1189 a("--toolshed_api_key", default="fakekey") | 1230 a("--toolshed_api_key", default="fakekey") |
| 1190 a("--galaxy_api_key", default="fakekey") | 1231 a("--galaxy_api_key", default="fakekey") |
| 1191 a("--galaxy_root", default="/galaxy-central") | 1232 a("--galaxy_root", default="/galaxy-central") |
| 1192 a("--galaxy_venv", default="/galaxy_venv") | 1233 a("--galaxy_venv", default="/galaxy_venv") |
| 1205 args.additional_parameters[i] = args.additional_parameters[i].replace('"', "") | 1246 args.additional_parameters[i] = args.additional_parameters[i].replace('"', "") |
| 1206 r = ScriptRunner(args) | 1247 r = ScriptRunner(args) |
| 1207 r.writeShedyml() | 1248 r.writeShedyml() |
| 1208 r.makeTool() | 1249 r.makeTool() |
| 1209 if args.make_Tool == "generate": | 1250 if args.make_Tool == "generate": |
| 1210 retcode = r.run() # for testing toolfactory itself | 1251 retcode = r.run() # for testing toolfactory itself |
| 1211 r.moveRunOutputs() | 1252 r.moveRunOutputs() |
| 1212 r.makeToolTar() | 1253 r.makeToolTar() |
| 1213 else: | 1254 else: |
| 1214 r.planemo_biodocker_test() # test to make outputs and then test | 1255 r.planemo_biodocker_test() # test to make outputs and then test |
| 1215 r.moveRunOutputs() | 1256 r.moveRunOutputs() |
| 1216 r.makeToolTar() | 1257 r.makeToolTar() |
| 1217 if args.make_Tool == "gentestinstall": | 1258 if args.make_Tool == "gentestinstall": |
| 1218 r.shedLoad() | 1259 r.shedLoad() |
| 1219 r.eph_galaxy_load() | 1260 r.eph_galaxy_load() |
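
The `copy_to_container` and `copy_from_container` helpers reformatted in this revision move files through the Docker SDK's archive API, which only accepts and returns tar streams. A minimal standalone sketch of that pattern follows; it is illustrative only, and the function names and paths are placeholders, not values from this changeset.

```python
import os
import tarfile
import tempfile

import docker  # Docker SDK for Python


def put_dir(container, src, dest):
    """Tar a local directory and unpack it at dest inside the container."""
    _, tfname = tempfile.mkstemp(suffix=".tar")
    with tarfile.open(tfname, mode="w") as tar:
        tar.add(src, arcname=os.path.basename(src))
    with open(tfname, "rb") as tf:
        container.put_archive(dest, tf.read())  # dest must already exist in the container
    os.unlink(tfname)


def get_dir(container, src, dest):
    """Fetch src from the container as a tar stream and extract it under dest."""
    os.makedirs(dest, exist_ok=True)
    _, tfname = tempfile.mkstemp(suffix=".tar")
    bits, _stat = container.get_archive(src)  # generator of tar chunks plus a stat dict
    with open(tfname, "wb") as tf:
        for chunk in bits:
            tf.write(chunk)
    with tarfile.open(tfname, "r") as tar:
        tar.extractall(dest)
    os.unlink(tfname)
```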
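
`planemo_biodocker_test` works around the planemo dependency-leak issue noted in its docstring by giving each run a throwaway container and named volume, then driving planemo with `exec_run`. Below is a hedged sketch of that container lifecycle, assuming the Docker SDK and the image named in the changeset; the exec'd command here is a placeholder.

```python
import docker

client = docker.from_env()
vol = client.volumes.create()  # scratch volume mounted into the container
container = client.containers.run(
    "quay.io/fubar2/planemo-biocontainer",
    "sleep 30m",  # keep the container alive so it can be exec'd into repeatedly
    detach=True,
    user="biodocker",
    volumes={vol.name: {"bind": "/toolfactory", "mode": "rw"}},
)
try:
    # every step is an exec inside the same long-running container
    exit_code, output = container.exec_run("planemo --version", user="biodocker")
    print(exit_code, output.decode())
finally:
    # discard the container and volume so no planemo state leaks into the next run
    container.stop()
    container.remove()
    vol.remove()
```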
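
`shedLoad` updates an existing Tool Shed repository through bioblend rather than planemo. A minimal sketch of the lookup-then-update flow it follows is below; the URL, API key, repository name, and tarball path are placeholders for illustration.

```python
from bioblend.toolshed import ToolShedInstance

ts = ToolShedInstance(url="http://localhost:9009", key="fakekey", verify=False)
repos = ts.repositories.get_repositories()
names = [r.get("name", "?") for r in repos]
ids = [r.get("id", "?") for r in repos]

tool_name = "example_tool"       # placeholder repository name
tarball = "example_tool.tar.gz"  # placeholder archive, e.g. the output of makeToolTar()
if tool_name in names:
    rid = ids[names.index(tool_name)]
    # bioblend raises ConnectionError when the upload is rejected,
    # e.g. because nothing in the repository has changed
    res = ts.repositories.update_repository(
        id=rid, tar_ball_path=tarball, commit_message=None
    )
    print(res)
```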
