changeset 1:1b6736f119c9 draft

planemo upload for repository https://github.com/jaidevjoshi83/gaiac.git commit a733fec1915a1f6cf5f07092dc33426f765b6cb3-dirty
author jay
date Thu, 15 May 2025 22:50:53 +0000
parents 7c9552ed8676
children abc767b6d4f4
files gaiac_pm_data_pulling/datapulling.py gaiac_pm_data_pulling/gaiac_pm_data_pulling.py gaiac_pm_data_pulling/test-data/host.txt gaiac_pm_data_pulling/test-data/input1.tsv
diffstat 4 files changed, 136 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gaiac_pm_data_pulling/datapulling.py	Thu May 15 22:50:53 2025 +0000
@@ -0,0 +1,29 @@
+import requests
+import sys
+
def download_files_from_server(urls, timeout=30):
    """
    Download each URL in *urls* into the current working directory.

    Args:
        urls (list[str]): Full download URLs; the final path segment of each
            URL is used as the local file name.
        timeout (float): Per-request timeout in seconds (default 30) so a
            hung or unreachable server cannot block the tool forever.

    Returns:
        None
    """
    for url in urls:
        # The last path component of the URL is the destination file name.
        file_name = url.rsplit('/', 1)[-1]
        response = requests.get(url, timeout=timeout)
        if response.status_code == 200:
            with open(file_name, "wb") as f:
                f.write(response.content)
            print(f"File {file_name} downloaded successfully!")
        else:
            print(f"Failed to download {file_name}. Status code: {response.status_code}")
+
if __name__ == "__main__":
    # Single CLI argument: a comma-separated list of URLs to fetch.
    if len(sys.argv) > 1:
        urls = sys.argv[1].split(',')
        download_files_from_server(urls)
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gaiac_pm_data_pulling/gaiac_pm_data_pulling.py	Thu May 15 22:50:53 2025 +0000
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+
+import argparse
+import logging
+from base64 import urlsafe_b64encode
+from html import escape
+from urllib.parse import urljoin
+import requests
+
+log = logging.getLogger("tools.iuc.data_managers.data_manager_refgenie_pull")
+
+import urllib.request
+import json
+
def get_file_list(ip, timeout=30):
    """
    Fetch the list of downloadable files advertised by a GAIAC file server.

    Args:
        ip (str): Base URL of the server, e.g. ``http://192.168.1.202:8080``.
        timeout (float): Socket timeout in seconds (default 30) so an
            unreachable host fails fast instead of hanging indefinitely.

    Returns:
        The JSON-decoded body of ``<ip>/list-files`` — presumably a list of
        file names; TODO confirm against the server API.
    """
    url = f"{ip}/list-files"
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return json.loads(response.read())
+
+
def simulate_dynamic_options(ip):
    """
    Build a Galaxy dynamic-options list for the files hosted at *ip*.

    Each file reported by ``get_file_list`` becomes one selectable option
    mapping the file name to its ``<ip>/download/<name>`` URL. Any failure
    (unreachable host, bad JSON, ...) is reported as a single selectable
    ERROR entry rather than raised, so the tool form still renders.

    Args:
        ip (str): Base URL of the file server.

    Returns:
        list[dict]: Dicts with 'name', 'value', 'options', 'selected' keys.
        On failure: one entry whose 'value' is 'ERROR' and whose 'name' is
        the HTML-escaped exception message.
    """
    try:
        return [
            {
                'name': file_name,
                'value': ip + '/download/' + file_name,
                'options': [],
                'selected': False,
            }
            for file_name in get_file_list(ip)
        ]
    except Exception as e:
        # Surface the failure inside the select box instead of crashing the
        # tool form (deliberate broad catch).
        return [{
            'name': escape(str(e)),
            'value': 'ERROR',
            'options': [],
            'selected': False,
        }]
+
if __name__ == '__main__':
    # Smoke-test the option builder against a local GAIAC server and show
    # what Galaxy would receive. (Previously the result was discarded,
    # making this run a no-op.)
    # NOTE(review): the server address is hard-coded — consider reading it
    # from argv like datapulling.py does.
    for option in simulate_dynamic_options('http://192.168.1.202:8080'):
        print(option)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gaiac_pm_data_pulling/test-data/host.txt	Thu May 15 22:50:53 2025 +0000
@@ -0,0 +1,2 @@
+http://192.168.1.202:8080
+http://192.168.1.202:8081
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gaiac_pm_data_pulling/test-data/input1.tsv	Thu May 15 22:50:53 2025 +0000
@@ -0,0 +1,7 @@
+date_time	Temparature1	Humidity1
+2019-06-07 13:28:00	39	50.471
+2019-06-07 13:29:00	39	51
+2019-06-07 13:30:00	39.588	49.647
+2019-06-07 13:31:00	38.9	50
+2019-06-07 13:32:00	39	51
+2019-06-07 13:33:00	38.9	50.8