Mercurial repository: mnhn65mo / netcdf_handler
changeset:   0:8da8ec7da45f (draft, default, tip)
author:      mnhn65mo
date:        Thu, 02 Aug 2018 09:24:38 -0400
summary:     Uploaded
parents:     (none)
children:    (none)
files:       netcdf_metadata_info.xml netcdf_read.py netcdf_read.xml
diffstat:    3 files changed, 754 insertions(+), 0 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/netcdf_metadata_info.xml	Thu Aug 02 09:24:38 2018 -0400
@@ -0,0 +1,77 @@
+<tool id="netcdf-metadata_info" name="Netcdf Metadata Info" version="0.1.0">
+    <description>summarize the content of a NetCDF file</description>
+    <requirements>
+        <requirement type="package" version="1.1.6">netcdf-metadata-info</requirement>
+    </requirements>
+    <command detect_errors="exit_code"><![CDATA[
+        netcdf-metadata-info '$input'
+        &&
+        while read -r l;do
+            a=\$(echo \$l | cut -d' ' -f1);echo \$l>dimensions_\$a
+        ;done <variables.tabular
+        &&
+        rm dimensions_VariableName
+        &&
+        for f in dimensions_*; do cat \$f | sed 's/ /\t\n/g' | sed '\$s/$/ /' >\$f.tabular ; done
+        &&
+        for f in dimensions_*.tabular;do
+            awk 'NR % 2 != 0' \$f > \$f.2
+            &&
+            sed 1d \$f.2 > \$f
+            &&
+            rm \$f.2
+        ;done
+        &&
+        ncdump -h '$input' > '$info'
+    ]]></command>
+    <inputs>
+        <param type="data" name="input" label="Netcdf file" format="netcdf,h5" help="Netcdf file you need information about."/>
+    </inputs>
+    <outputs>
+<!--
+        <data name="var_tabs" format="tabular">
+            <discover_datasets pattern="__designation_and_ext__" visible="true"/>
+            <discover_datasets pattern="conda_activate.log" visible="false"/>
+        </data>
+-->
+        <data name="output" format="tabular" label="Metadata info from ${input.name}.Variables.tab" from_work_dir="variables.tabular"/>
+        <data name="info" label="info file" format="txt"/>
+    </outputs>
+
+    <help><![CDATA[
+**What it does**
+
+First, the tool gives general information about the input in an 'info file' output (command: ncdump -h inputfile).
+
+Then, a general 'variables' tabular file summarizes the dimension details of each available variable.
+
+
+The summary tabular file has the following general structure:
+
+
+    Variable1    Var1_Number_of_Dim    Dim1    Dim1_size    ...    DimN    DimN_size
+
+    VariableX    VarX_Number_of_Dim    DimX1    DimX1_size    ...    DimXN    DimXN_size
+
+    ...
+
+
+**Input**
+
+A netcdf file (xxx.nc).
+
+**Outputs**
+
+An information file.
+
+A summary tabular file.
+
+
+--------------------------------
+
+The Netcdf Metadata Info tool uses the netCDF library: https://www.unidata.ucar.edu/software/netcdf/docs/index.html
+
+Run this tool before considering using Netcdf Reader.
+    ]]></help>
+
+</tool>
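The help above describes the layout of the variables.tabular summary: one row per variable, with the variable name, its number of dimensions, then dimension-name/size pairs. As a rough illustration of how such a summary can be consumed downstream, here is a minimal Python sketch; the function name parse_variables_table is hypothetical (not part of this repository), and the header guard assumes the first row starts with a non-numeric 'VariableName' field, as the command section suggests.

    # Minimal sketch: parse the 'variables' summary produced by Netcdf Metadata Info.
    # Assumed layout per line: Name  N_dims  Dim1  Dim1_size ... DimN  DimN_size
    def parse_variables_table(path="variables.tabular"):
        variables = {}
        with open(path) as handle:
            for line in handle:
                words = line.split()
                # skip empty lines and the header row (second field not a number)
                if len(words) < 2 or not words[1].isdigit():
                    continue
                name, n_dim = words[0], int(words[1])
                # dimension names sit at even offsets, their sizes right after them
                dims = {words[i]: int(words[i + 1]) for i in range(2, 2 + 2 * n_dim, 2)}
                variables[name] = dims
        return variables

    # Example: list every variable and its dimensions
    for var_name, dims in parse_variables_table().items():
        print(var_name, dims)

netcdf_read.py below does essentially this when it looks up the chosen variable and its dimension names in the tabular file.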
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/netcdf_read.py	Thu Aug 02 09:24:38 2018 -0400
@@ -0,0 +1,421 @@
+import netCDF4
+from netCDF4 import Dataset
+import numpy as np
+import matplotlib
+matplotlib.use("Agg")
+import matplotlib.pyplot as plt
+from pylab import *
+import sys
+import os
+from scipy import spatial
+from math import radians, cos, sin, asin, sqrt
+import itertools
+
+#####################
+#####################
+
+def checklist(dim_list, dim_name, filtre, threshold):
+    if not dim_list:
+        error="Error "+str(dim_name)+" has no value "+str(filtre)+" "+str(threshold)
+        sys.exit(error)
+
+
+#Return the distance in km between two coordinates
+#Thanks to : https://stackoverflow.com/questions/4913349/haversine-formula-in-python-bearing-and-distance-between-two-gps-points
+def haversine(lon1, lat1, lon2, lat2):
+    """
+    Calculate the great circle distance between two points
+    on the earth (specified in decimal degrees)
+    """
+    # convert decimal degrees to radians
+    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
+
+    # haversine formula
+    dlon = lon2 - lon1
+    dlat = lat2 - lat1
+    a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
+    c = 2 * asin(sqrt(a))
+    r = 6371 # Radius of earth in kilometers. Use 3956 for miles
+    return c * r
+
+
+#Comparison functions; each returns the list of indexes matching the user condition
+def is_strict_inf(filename, dim_name, threshold):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] < threshold:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,"<",threshold)
+    return list_dim
+
+def is_equal_inf(filename, dim_name, threshold):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] <= threshold:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,"<=",threshold)
+    return list_dim
+
+def is_equal_sup(filename, dim_name, threshold):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] >= threshold:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,">=",threshold)
+    return list_dim
+
+def is_strict_sup(filename, dim_name, threshold):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] > threshold:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,">",threshold)
+    return list_dim
+
+def find_nearest(array,value):
+    index = (np.abs(array-value)).argmin()
+    return index
+
+def is_equal(filename, dim_name, value):
+    try:
+        index=filename.variables[dim_name][:].tolist().index(value)
+    except:
+        index=find_nearest(filename.variables[dim_name][:],value)
+    return index
+
+def is_between_include(filename, dim_name, threshold1, threshold2):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] >= threshold1 and filename.variables[dim_name][i] <= threshold2:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,">=",threshold1)
+    checklist(list_dim,dim_name,"<=",threshold2)
+    return list_dim
+
+def is_between_exclude(filename, dim_name, threshold1, threshold2):
+    list_dim=[]
+    for i in range(0,filename.variables[dim_name].size):
+        if filename.variables[dim_name][i] > threshold1 and filename.variables[dim_name][i] < threshold2:
+            list_dim.append(i)
+    checklist(list_dim,dim_name,">",threshold1)
+    checklist(list_dim,dim_name,"<",threshold2)
+    return list_dim
+
+#######################
+#######################
+
+#Get args
+#Get input file
+inputfile=Dataset(sys.argv[1])
+var_file_tab=sys.argv[2]
+var=sys.argv[3] #Variable chosen by the user
+
+Coord_bool=False
+
+
+######################
+######################
+#len_threshold=1000000
+len_threshold=7000
+x_percent=0.75
+threshold_latlon=100
+
+
+#Check if coordinates were passed as parameters
+arg_n=len(sys.argv)-1
+if(((arg_n-3)%3)!=0):
+    Coord_bool=True #Useful to get the closest coordinate
+    arg_n=arg_n-4 #Number of args minus lat & lon
+    name_dim_lat=str(sys.argv[-4])
+    name_dim_lon=str(sys.argv[-2])
+    value_dim_lat=float(sys.argv[-3])
+    value_dim_lon=float(sys.argv[-1])
+
+    #Get all lat & lon
+    #try:
+    if True:
+        latitude=np.ma.MaskedArray(inputfile.variables[name_dim_lat])
+        longitude=np.ma.MaskedArray(inputfile.variables[name_dim_lon])
+        lat=latitude;lon=longitude #Useful to keep the original lat/lon vectors before potentially resizing them below.
+        len_all_coord=len(lat)*len(lon)
+
+        #print("len all coord "+str(len_all_coord)+" threshold "+str(len_threshold))
+
+        #Avoid the case where all_coord is too big and needs too much memory:
+        #if the vector is too big, shrink it in a loop until it is below the threshold
+        while len_all_coord > len_threshold:
+
+            if len(lat)<threshold_latlon: #If lat and lon sizes are very different and lon is much larger than lat, only lon is reduced, not lat.
+                x_percent_len_lat=99999999
+            else:
+                x_percent_len_lat=int(x_percent*len(lat))
+
+            if len(lon)<threshold_latlon: #If lat and lon sizes are very different and lat is much larger than lon, only lat is reduced, not lon.
+                x_percent_len_lon=99999999
+            else:
+                x_percent_len_lon=int(x_percent*len(lon))
+
+            #print("len(lat) :"+str(len(lat))+" x_percent_len_lat "+str(x_percent_len_lat))
+            #print("len(lon) :"+str(len(lon))+" x_percent_len_lon "+str(x_percent_len_lon))
+
+
+            pos_lat_user=find_nearest(lat,value_dim_lat)
+            pos_lon_user=find_nearest(lon,value_dim_lon)
+
+
+            #This part avoids a window that starts below index 0
+            lat_reduced=int(pos_lat_user-x_percent_len_lat/2-1)
+            if lat_reduced<0:
+                lat_reduced=0
+            lon_reduced=int(pos_lon_user-x_percent_len_lon/2-1)
+            if lon_reduced<0:
+                lon_reduced=0
+            #Opposite here, to avoid a window that ends past the vector length
+            lat_extended=int(pos_lat_user+x_percent_len_lat/2-1)
+            if lat_extended>len(lat):
+                lat_extended=len(lat)
+            lon_extended=int(pos_lon_user+x_percent_len_lon/2-1)
+            if lon_extended>len(lon):
+                lon_extended=len(lon)
+
+            lat=lat[lat_reduced:lat_extended] #add a test to check if pos_lat_user-x_percent_len_lat/2-1 >0
+            lon=lon[lon_reduced:lon_extended]
+            #print("latreduced : "+str(lat_reduced)+" latextended "+str(lat_extended))
+            #print("lonreduced : "+str(lon_reduced)+" lonextended "+str(lon_extended))
+            #print("lat : "+str(lat))
+            #print("lon : "+str(lon))
+            len_all_coord=len(lat)*len(lon)
+
+            #print ("len_all_coord : "+str(len_all_coord)+". len_lat : "+str(len(lat))+" .len_lon : "+str(len(lon)))
+
+    else:
+    #except:
+        sys.exit("Latitude & Longitude not found")
+
+    #Store every available lat-lon pair in list_coord_dispo
+    list_coord_dispo=[]
+    for i in lat:
+        for j in lon:
+            list_coord_dispo.append(i);list_coord_dispo.append(j)
+
+    #Reshape
+    all_coord=np.reshape(list_coord_dispo,(lat.size*lon.size,2))
+    #np.set_printoptions(threshold='nan')#to print full vec
+    #print(str(all_coord))
+    noval=True
+
+
+
+#########################
+#########################
+
+
+#Get the file of variables and number of dims : var.tab
+var_file=open(var_file_tab,"r") #read
+lines=var_file.readlines() #lines
+dim_names=[]
+for line in lines: #for every line
+    words=line.split()
+    if (words[0]==var): #When the line matches the user input var
+        varndim=int(words[1]) #Get the number of dims for the var
+        for dim in range(2,varndim*2+2,2): #Get dim names
+            dim_names.append(words[dim])
+#print ("Chosen var : "+sys.argv[3]+". Number of dimensions : "+str(varndim)+". Dimensions : "+str(dim_names)) #Standard msg
+
+
+########################
+########################
+
+
+#Use a dictionary to save every list of indexes
+my_dic={} ##d["string{0}".format(x)]
+
+for i in range(4,arg_n,3):
+    #print("\nDimension name : "+sys.argv[i]+" action : "+sys.argv[i+1]+" .Value : "+sys.argv[i+2]+"\n") #Standard msg
+
+    #Check that the dim selected for filtering is present in the var dimensions.
+    if (sys.argv[i] not in dim_names):
+        print("Warning! "+sys.argv[i]+" is not a dimension of "+var+".\nThis filter will be skipped.\nCheck the available dimensions in the \"variables\" file.\n\n")
+        continue #skip this filter
+
+    my_dic["string{0}".format(i)]="list_index_dim"
+    my_dic_index="list_index_dim"+str(sys.argv[i]) #Possible improvement: check that lon/lat are not parsed again
+
+    #Apply every user filter. Call the matching function and store the list of indexes which validate the condition for every dim.
+    if (sys.argv[i+1]=="l"): #<
+        my_dic[my_dic_index]=is_strict_inf(inputfile, sys.argv[i], float(sys.argv[i+2]))
+    if (sys.argv[i+1]=="le"): #<=
+        my_dic[my_dic_index]=is_equal_inf(inputfile, sys.argv[i], float(sys.argv[i+2]))
+    if (sys.argv[i+1]=="g"): #>
+        my_dic[my_dic_index]=is_strict_sup(inputfile, sys.argv[i], float(sys.argv[i+2]))
+    if (sys.argv[i+1]=="ge"): #>=
+        my_dic[my_dic_index]=is_equal_sup(inputfile, sys.argv[i], float(sys.argv[i+2]))
+    if (sys.argv[i+1]=="e"): #==
+        my_dic[my_dic_index]=is_equal(inputfile, sys.argv[i], float(sys.argv[i+2]))
+    if (sys.argv[i+1]==":"): #all
+        my_dic[my_dic_index]=np.arange(inputfile.variables[sys.argv[i]].size)
+    if (sys.argv[i+1]=="be"): #between_exclude
+        #Get the 2 thresholds from the arg which looks like "threshold1-threshold2"
+        threshold1=sys.argv[i+2].split("-")[0]
+        threshold2=sys.argv[i+2].split("-")[1]
+        my_dic[my_dic_index]=is_between_exclude(inputfile, sys.argv[i], float(threshold1), float(threshold2))
+    if (sys.argv[i+1]=="bi"): #between_include
+        #Get the 2 thresholds from the arg which looks like "threshold1-threshold2"
+        threshold1=sys.argv[i+2].split("-")[0]
+        threshold2=sys.argv[i+2].split("-")[1]
+        my_dic[my_dic_index]=is_between_include(inputfile, sys.argv[i], float(threshold1), float(threshold2))
+
+#####################
+#####################
+
+
+#If precise coordinates were given.
+if Coord_bool:
+    while noval: #While no closest coordinate with valid values has been found
+        #Find the closest available coordinate
+        tree=spatial.KDTree(all_coord)
+        closest_coord=(tree.query([(value_dim_lat,value_dim_lon)]))
+        cc_index=closest_coord[1]
+
+        closest_lat=float(all_coord[closest_coord[1]][0][0])
+        closest_lon=float(all_coord[closest_coord[1]][0][1])
+
+        #Put the coordinate indexes into the dictionary
+        my_dic_index="list_index_dim"+str(name_dim_lat)
+        my_dic[my_dic_index]=latitude.tolist().index(closest_lat)
+
+        my_dic_index="list_index_dim"+str(name_dim_lon)
+        my_dic[my_dic_index]=longitude.tolist().index(closest_lon)
+
+
+        #All dictionary lookups are concatenated into the string exec2, which is then exec()'d. The extracted values end up in vec2.
+        exec2="vec2=inputfile.variables['"+var+"']["
+        first=True
+        for i in dim_names: #Every dim is in the right order
+            if not first:
+                exec2=exec2+","
+            dimension_indexes="my_dic[\"list_index_dim"+i+"\"]" #new dim, custom name dic
+            try: #If there is an error or no specific user choice, every index is used for the selected dim.
+                exec(dimension_indexes)
+            except:
+                dimension_indexes=":"
+            exec2=exec2+dimension_indexes #Concatenate dim
+            first=False #Not the first element now
+        exec2=exec2+"]"
+        #print exec2 #To check integrity of the string
+        exec(exec2) #Execution, values are in vec2.
+        #print vec2 #Get the values, standard output
+
+        #Check the integrity of vec2. We don't want NA values
+        i=0
+        #Check every value; if at least one non-NA is found, vec2 and the current closest coordinates are validated
+        vecsize=vec2.size
+        #print (str(vecsize))
+        if vecsize>1:
+            while i<vecsize:
+                #print (str(vec2))
+                if vec2[i]!="nan":
+                    break
+                else:
+                    i=i+1
+        else:
+            if vec2!="nan":
+                break
+            else:
+                i=i+1
+
+        if i<vecsize: #There is at least 1 non-NA value
+            noval=False
+        else: #If only NA: drop the closest coordinate and try the next closest one in the next loop.
+            all_coord=np.delete(all_coord,cc_index,0)
+
+
+#Same as before: the dictionary is used in exec2; exec(exec2) gives vec2 with the wanted values.
+else:
+    exec2="vec2=inputfile.variables['"+str(sys.argv[3])+"']["
+    first=True
+    for i in dim_names: #Respect order
+        if not first:
+            exec2=exec2+","
+        dimension_indexes="my_dic[\"list_index_dim"+i+"\"]"
+        try: #Avoid error and exit
+            exec(dimension_indexes)
+        except:
+            dimension_indexes=":"
+        exec2=exec2+dimension_indexes
+        first=False
+    exec2=exec2+"]"
+    exec(exec2)
+
+
+########################
+########################
+
+
+#This part creates the header for every value.
+#The values of every dim of the var are saved in a list : b[].
+#All the b lists are saved in the unique list a[].
+#All the combinations of the dim values inside a[] are the headers of the vec2 values.
+
+#Also write dim_name into a file to make a clear header.
+fo=open("header_names",'w')
+
+a=[]
+for i in dim_names:
+    try: #If this fails it is because my_dic[...] is ':', so there is no size. The except branch directly takes the size of the dim.
+        size_dim=inputfile[i][my_dic['list_index_dim'+i]].size
+    except:
+        size_dim=inputfile[i].size
+        my_dic['list_index_dim'+i]=range(size_dim)
+
+    #print (i,size_dim) #Standard msg
+    b=[]
+    #Checking the size is useful since b.append(inputfile[i][my_dic['list_index_dim'+i][0]]) won't work
+    if size_dim>1:
+        for s in range(0,size_dim):
+            b.append(inputfile[i][my_dic['list_index_dim'+i][s]])
+            #print (i,inputfile[i][my_dic['list_index_dim'+i][s]])
+    else:
+        b.append(inputfile[i][my_dic['list_index_dim'+i]])
+        #print (i,inputfile[i][my_dic['list_index_dim'+i]])
+
+    a.append(b)
+    fo.write(i+"\t")
+if Coord_bool:
+    fo.write("input_lat\t"+"input_lon\t")
+fo.write(var+"\n")
+fo.close()
+
+
+######################
+######################
+
+
+#Write the header into a file
+fo=open("header",'w')
+for combination in itertools.product(*a):
+    if Coord_bool:
+        fo.write(str(combination)+"_"+str(value_dim_lat)+"_"+str(value_dim_lon)+"\t")
+    else:
+        fo.write(str(combination)+"\t")
+fo.write("\n")
+fo.close()
+
+
+#Write vec2 into a tabular formatted file
+fo=open("sortie.tabular",'w')
+#print(str(vec2))
+try:
+    vec2.tofile(fo,sep="\t",format="%s")
+except:
+    vec3=np.ma.filled(vec2,np.nan)
+    vec3.tofile(fo,sep="\t",format="%s")
+fo.close()
+
+
+######################
+######################
+
+
+#Final message
+print (var+" values successfully extracted from "+sys.argv[1]+" !")
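The heart of netcdf_read.py is the nearest-coordinate search: it builds every (lat, lon) pair, queries a scipy KDTree for the pair closest to the user's point and, if only NA values are found there, removes that pair and tries the next closest one. Below is a condensed, self-contained sketch of that idea; the helper nearest_valid_index and the has_valid_value callback are illustrative names, not functions defined in the script.

    # Condensed sketch of the nearest-valid-coordinate search used above.
    import numpy as np
    from scipy import spatial

    def nearest_valid_index(lats, lons, target, has_valid_value):
        # every (lat, lon) pair laid out as an (n_lat * n_lon, 2) array
        all_coord = np.array([(la, lo) for la in lats for lo in lons])
        while len(all_coord):
            tree = spatial.KDTree(all_coord)
            _, idx = tree.query([target])          # index of the closest remaining pair
            lat_value, lon_value = all_coord[idx][0]
            if has_valid_value(lat_value, lon_value):
                return list(lats).index(lat_value), list(lons).index(lon_value)
            all_coord = np.delete(all_coord, idx, 0)  # drop it and try the next closest
        return None

In the script itself, the validity check is the loop over vec2 that looks for at least one non-NA value before setting noval=False.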
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/netcdf_read.xml	Thu Aug 02 09:24:38 2018 -0400
@@ -0,0 +1,256 @@
+<tool id="netcdf_read" name="Netcdf Reader" version="0.2.0">
+    <description>extracts variable values with custom conditions on dimensions</description>
+    <requirements>
+        <requirement type="package" version="2.1.0">matplotlib</requirement>
+        <requirement type="package" version="1.3.1">netCDF4</requirement>
+        <requirement type="package" version="1.0.0">scipy</requirement>
+        <requirement type="package" version="1.1.0">datamash</requirement>
+    </requirements>
+    <command detect_errors="exit_code"><![CDATA[
+        mkdir output_dir &&
+
+        #if $condi_source_coord.coord_source=="coord_from_file"
+            i=0 &&
+            re='^[-+]?[0-9]+\.?[0-9]*$' &&
+            while read line; do
+                lat=\$(echo \$line | cut -d' ' -f1)
+                lon=\$(echo \$line | cut -d' ' -f2)
+                &&
+                if ! [[ \$lat =~ \$re ]] || ! [[ \$lon =~ \$re ]] ; then continue ;fi
+                &&
+                i=\$((\$i+1)) &&
+                python '$__tool_directory__/netcdf_read.py' '$input' '$var_tab' $var
+                #for $i,$uc in enumerate($user_choice)
+                    #if $uc.condi_between.comparator=="bi"
+                        ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.t1}-${uc.condi_between.t2}
+                    #elif $uc.condi_between.comparator=="be"
+                        ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.t1}-${uc.condi_between.t2}
+                    #else
+                        ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.value}
+                    #end if
+                #end for
+                '$condi_source_coord.lat_dim'
+                \$lat
+                '$condi_source_coord.lon_dim'
+                \$lon
+
+                &&
+                cat 'header' | sed 's/array(\[//g' | sed 's/], dtype=float32)//g'| sed 's/,\s/_/g' | sed 's/(//g' | sed 's/)//g' > 'header_cleaned'
+                &&
+                cat 'header_cleaned' 'sortie.tabular' > 'supersortie.tabular'
+                &&
+                datamash transpose < 'supersortie.tabular' > 'supersortie_transposed.tabular'
+                &&
+                sed -i 's/_/\t/g' 'supersortie_transposed.tabular'
+                &&
+                cat 'header_names' 'supersortie_transposed.tabular' | sed 's/\s/\t/g' > 'output_dir/coord'\$i'.tabular';
+            done<'$coord_tabular'
+
+        #else
+
+            python '$__tool_directory__/netcdf_read.py' '$input' '$var_tab' $var
+            #for $i,$uc in enumerate($user_choice)
+                #if $uc.condi_between.comparator=="bi"
+                    ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.t1}-${uc.condi_between.t2}
+                #elif $uc.condi_between.comparator=="be"
+                    ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.t1}-${uc.condi_between.t2}
+                #else
+                    ${uc.dim} ${uc.condi_between.comparator} ${uc.condi_between.value}
+                #end if
+            #end for
+            #if $condi_source_coord.condi_coord.coord=='yes_cust_coord'
+                $condi_source_coord.condi_coord.lat_dim $condi_source_coord.condi_coord.lat_val $condi_source_coord.condi_coord.lon_dim $condi_source_coord.condi_coord.lon_val
+            #end if
+            &&
+            cat 'header' | sed 's/array(\[//g' | sed 's/], dtype=float32)//g'| sed 's/,\s/_/g' | sed 's/(//g' | sed 's/)//g' > 'header_cleaned'
+            &&
+            cat 'header_cleaned' 'sortie.tabular' > 'supersortie.tabular'
+            &&
+            datamash transpose < 'supersortie.tabular' > 'supersortie_transposed.tabular'
+            &&
+            sed -i 's/_/\t/g' 'supersortie_transposed.tabular'
+            &&
+            cat 'header_names' 'supersortie_transposed.tabular' | sed 's/\s/\t/g' > 'final.tabular'
+
+        #end if
+
+    ]]></command>
+    <inputs>
+        <param type="data" name="input" label="Input netcdf file" format="netcdf,h5"/>
+        <param type="data" label="Tabular of variables" name="var_tab" format="tabular" help="Select the tabular file which summarizes the available variables and dimensions."/>
+
+        <param name="var" type="select" label="Choose the variable to extract">
+            <options from_dataset="var_tab">
+                <column name="name" index="0"/>
+                <column name="value" index="0"/>
+                <column name="n_dim" index="1"/>
+            </options>
+        </param>
+
+        <conditional name="condi_source_coord">
+            <param name="coord_source" type="select" label="Source of coordinates">
+                <option value="coord_from_file">Use coordinates from input file</option>
+                <option value="coord_from_stdin">Manually enter coordinates</option>
+            </param>
+
+            <when value="coord_from_file">
+                <param type="data" label="Tabular of coordinates" name="coord_tabular" format="tabular" help="Format : Latitude Longitude"/>
+                <param name="lat_dim" type="select" label="Select latitude" >
+                    <options from_dataset="var_tab">
+                        <column name="value" index="0"/>
+                    </options>
+                </param>
+                <param name="lon_dim" type="select" label="Select longitude" >
+                    <options from_dataset="var_tab">
+                        <column name="value" index="0"/>
+                    </options>
+                </param>
+            </when>
+
+            <when value="coord_from_stdin">
+                <conditional name="condi_coord">
+                    <param name="coord" type="boolean" label="Search values for custom coordinates" truevalue="yes_cust_coord" checked="true" falsevalue="nope" help="Use this option to get valid values at your custom coordinates. If only NA values are available, the tool searches the next closest coordinate until valid values are found."/>
+                    <when value="yes_cust_coord">
+                        <param name="lat_dim" type="select" label="Select latitude" >
+                            <options from_dataset="var_tab">
+                                <column name="value" index="0"/>
+                            </options>
+                        </param>
+                        <param name="lat_val" type="float" value="0" label="Latitude"/>
+                        <param name="lon_dim" type="select" label="Select longitude" >
+                            <options from_dataset="var_tab">
+                                <column name="value" index="0"/>
+                            </options>
+                        </param>
+                        <param name="lon_val" type="float" value="0" label="Longitude"/>
+                    </when>
+
+                    <when value="nope"></when>
+                </conditional>
+            </when>
+
+        </conditional>
+
+        <repeat name="user_choice" title="Filter">
+            <param name="dim" type="select" label="Dimensions">
+                <options from_dataset="var_tab">
+                    <column name="value" index="0"/>
+                </options>
+            </param>
+            <conditional name="condi_between">
+                <param name="comparator" type="select" label="Comparator">
+                    <option value="e">Equal</option>
+                    <option value="g">Greater</option>
+                    <option value="ge">Greater or equal</option>
+                    <option value="l">Less</option>
+                    <option value="le">Less or equal</option>
+                    <option value="be">Between-exclude ]threshold1,threshold2[</option>
+                    <option value="bi">Between-include [threshold1,threshold2]</option>
+                </param>
+                <when value="bi">
+                    <param name="t1" type="float" value="0" label="Inferior threshold"/>
+                    <param name="t2" type="float" value="0" label="Superior threshold"/>
+                </when>
+                <when value="be">
+                    <param name="t1" type="float" value="0" label="Inferior threshold"/>
+                    <param name="t2" type="float" value="0" label="Superior threshold"/>
+                </when>
+                <when value="e">
+                    <param name="value" type="float" value="0" label="Value"/>
+                </when>
+                <when value="g">
+                    <param name="value" type="float" value="0" label="Value"/>
+                </when>
+                <when value="ge">
+                    <param name="value" type="float" value="0" label="Value"/>
+                </when>
+                <when value="l">
+                    <param name="value" type="float" value="0" label="Value"/>
+                </when>
+                <when value="le">
+                    <param name="value" type="float" value="0" label="Value"/>
+                </when>
+            </conditional>
+        </repeat>
+
+    </inputs>
+
+    <outputs>
+        <collection type="list" name="output">
+            <discover_datasets pattern="__designation_and_ext__" visible="false" format="tabular" directory="output_dir"/>
+            <filter>condi_source_coord['coord_source'] == 'coord_from_file'</filter>
+        </collection>
+        <data name="simpleoutput" from_work_dir="final.tabular" format="tabular">
+            <filter>condi_source_coord['coord_source'] == 'coord_from_stdin'</filter>
+        </data>
+    </outputs>
+
+
+    <tests>
+        <test>
+            <param name="input" value="dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc"/>
+            <param name="var_tab" value="var_tab_dataset-ibi"/>
+            <param name="var" value="phy"/>
+            <param name="dim_tab" value="tab_dim_phy_dataset-ibi"/>
+            <param name="coord" value="yes_cust_coord"/>
+            <param name="lat_dim" value="latitude"/>
+            <param name="lat_val" value="44.0"/>
+            <param name="lon_dim" value="longitude"/>
+            <param name="lon_val" value="-2.0"/>
+            <param name="output" value="Test1.tabular"/>
+        </test>
+        <test>
+            <param name="input" value="dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc"/>
+            <param name="var_tab" value="var_tab_dataset-ibi"/>
+            <param name="var" value="nh4"/>
+            <param name="dim_tab" value="tab_dim_nh4_dataset-ibi"/>
+            <param name="coord" value="nope"/>
+            <param name="dim" value="time"/>
+            <param name="comparator" value="e"/>
+            <param name="value" value="7272.0"/>
+            <param name="dim" value="latitude"/>
+            <param name="comparator" value="ge"/>
+            <param name="value" value="45.0"/>
+            <param name="output" value="Test2.tabular"/>
+        </test>
+
+    </tests>
+
+    <help><![CDATA[
+**What it does**
+
+This tool extracts variable values with custom conditions on dimensions.
+
+It can use manually entered coordinates or automatically take them from a tabular file to filter the information.
+
+If no values are available at a given coordinate, the tool searches for the closest coordinate with a non-NA value.
+
+Filters can be set on every dimension. Available filtering operations are: =, >, <, >=, <=, [interval], ]interval[.
+
+
+**Input**
+
+A netcdf file (.nc).
+
+The variable tabular file from 'Netcdf Metadata Info'.
+
+A tabular file of coordinates with the following structure: 'lat' 'lon'.
+
+
+**Outputs**
+
+A single output with the values of the selected variable if there is only one coordinate.
+
+A data collection with one file per coordinate, if multiple coordinates are read from the tabular file.
+
+
+-------------------------------------------------
+
+The Netcdf Reader tool can be used after Netcdf Metadata Info.
+    ]]></help>
+</tool>
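For reference, the second test above (variable nh4, filters "time e 7272.0" and "latitude ge 45.0") corresponds roughly to the following direct netCDF4/numpy selection. This is only a sketch: the file, variable, and dimension names come from that test, and the assumed dimension order (time, depth, latitude, longitude) may differ in the actual dataset.

    # Roughly what the second test asks for, done directly with netCDF4/numpy.
    import numpy as np
    from netCDF4 import Dataset

    nc = Dataset("dataset-ibi-reanalysis-bio-005-003-monthly-regulargrid_1510914389133.nc")
    time = nc.variables["time"][:]
    lat = nc.variables["latitude"][:]

    t_idx = int(np.abs(time - 7272.0).argmin())   # "e" comparator: nearest match
    lat_idx = np.where(lat >= 45.0)[0]            # "ge" comparator: boolean filter

    # assumed dimension order for nh4: (time, depth, latitude, longitude)
    subset = nc.variables["nh4"][t_idx, :, lat_idx, :]
    print(subset.shape)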