diff OPPL/inference.xml @ 11:6ca67b155e32
Imports simplified, new tool for inference added
author    Mikel Egaña Aranguren <mikel-egana-aranguren@toolshed.g2.bx.psu.edu>
date      Fri, 09 Mar 2012 16:15:27 +0100
parents
children  68c4ae500a13
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OPPL/inference.xml	Fri Mar 09 16:15:27 2012 +0100
@@ -0,0 +1,80 @@
+<tool id="inference" name="Perform inference in an OWL ontology" version="1.0.1">
+  <description>Performs inference in an OWL ontology and generates a new ontology with the inferred axioms as asserted axioms</description>
+
+  <!-- For big ontologies I use -Xmx7000M -Xms250M -DentityExpansionLimit=1000000000. If that is too much for your machine, delete or modify these settings at will; since Galaxy is usually used in a server setting, it makes sense to allocate a big chunk of memory -->
+
+  <command>
+    java -Xmx7000M -Xms250M -DentityExpansionLimit=1000000000 -jar ${__tool_data_path__}/shared/jars/inference.jar $input $reasoner $axioms > $output
+  </command>
+
+  <inputs>
+    <param format="text" name="input" type="data" label="Input ontology file"/>
+    <param name="reasoner" type="select" label="Choose reasoner">
+      <option value="Pellet" selected="true">Pellet</option>
+      <option value="HermiT">HermiT</option>
+      <option value="FaCTPlusPlus">FaCT++</option>
+    </param>
+    <param name="axioms" type="select" display="checkboxes" multiple="true" label="Select which axioms to add as asserted">
+      <option value="CLASS_ASSERTIONS">CLASS_ASSERTIONS</option>
+      <option value="CLASS_HIERARCHY">CLASS_HIERARCHY</option>
+      <option value="DATA_PROPERTY_HIERARCHY">DATA_PROPERTY_HIERARCHY</option>
+      <option value="DISJOINT_CLASSES">DISJOINT_CLASSES</option>
+      <option value="OBJECT_PROPERTY_HIERARCHY">OBJECT_PROPERTY_HIERARCHY</option>
+    </param>
+  </inputs>
+  <outputs>
+    <data format="text" name="output" />
+  </outputs>
+  <tests>
+    <test>
+      <param name="input" value="test.owl"/>
+      <param name="reasoner" value="Pellet"/>
+      <param name="axioms" value="CLASS_ASSERTIONS,CLASS_HIERARCHY,OBJECT_PROPERTY_HIERARCHY"/>
+      <output name="output" file="test_new.owl"/>
+    </test>
+  </tests>
+  <help>
+
+**About Inference-Galaxy**
+
+ Inference-Galaxy performs automated reasoning on an OWL ontology and injects the inferred axioms as asserted axioms, generating a new OWL ontology.
+
+**Formats**
+
+ Inference-Galaxy uses the OWL API, so it can load any ontology format that the API supports: OBO flat file, OWL (RDF/XML, OWL/XML, Functional, Manchester), Turtle, and KRSS. The output is OWL (RDF/XML).
+
+**Usage**
+
+ An ontology is needed as input: upload it with Get Data >> Upload File, or redirect the output of another Galaxy tool. If the ontology includes imports, they must be resolvable.
+
+ The reasoner can be Pellet, HermiT, or FaCT++.
+
+ The inferred axioms to add as asserted axioms can be chosen.
+
+**More information**
+
+ Galaxy public instances with Inference-Galaxy pre-installed:
+
+ http://sele.inf.um.es:8080/
+
+ http://linkeddata2.dia.fi.upm.es:8080
+
+ Links of interest:
+
+ http://owlapi.sourceforge.net/
+
+ http://www.w3.org/TR/owl2-manchester-syntax/
+
+ http://clarkparsia.com/pellet
+
+ http://hermit-reasoner.com/
+
+ http://code.google.com/p/factplusplus/
+
+**Contact**
+
+ Please send any requests or comments to mikel.egana.aranguren@gmail.com.
+
+  </help>
+
+</tool>
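
For readers curious how the reasoning and injection step wrapped by inference.jar typically looks, below is a minimal Java sketch in the style of the OWL API 3.x of that era, using the HermiT reasoner. It is an illustration of the general technique the help text describes, not the actual contents of inference.jar: the choice of generators (e.g. mapping CLASS_HIERARCHY to InferredSubClassAxiomGenerator and CLASS_ASSERTIONS to InferredClassAssertionAxiomGenerator), the reasoner factory, and the argument handling are all assumptions.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.semanticweb.HermiT.Reasoner;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
import org.semanticweb.owlapi.io.SystemOutDocumentTarget;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.util.InferredAxiomGenerator;
import org.semanticweb.owlapi.util.InferredClassAssertionAxiomGenerator;
import org.semanticweb.owlapi.util.InferredOntologyGenerator;
import org.semanticweb.owlapi.util.InferredSubClassAxiomGenerator;

// Hypothetical sketch: not the real inference.jar, just the general OWL API pattern.
public class InferenceSketch {
    public static void main(String[] args) throws Exception {
        // Load the input ontology; the Galaxy wrapper would pass $input as args[0].
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(args[0]));

        // Create a reasoner over the ontology (HermiT here; Pellet and FaCT++
        // expose equivalent OWLReasonerFactory implementations).
        OWLReasoner reasoner = new Reasoner.ReasonerFactory().createReasoner(ontology);

        // Pick the inferred-axiom generators; these two roughly correspond to the
        // tool's CLASS_HIERARCHY and CLASS_ASSERTIONS options (assumed mapping).
        List<InferredAxiomGenerator<? extends OWLAxiom>> generators =
                new ArrayList<InferredAxiomGenerator<? extends OWLAxiom>>();
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredClassAssertionAxiomGenerator());

        // Inject the inferred axioms into the ontology as asserted axioms.
        InferredOntologyGenerator inferredGenerator =
                new InferredOntologyGenerator(reasoner, generators);
        inferredGenerator.fillOntology(manager, ontology);

        // Write the enriched ontology as RDF/XML to standard output, which the
        // Galaxy command line redirects into $output.
        manager.saveOntology(ontology, new RDFXMLOntologyFormat(), new SystemOutDocumentTarget());
    }
}

In this sketch the output goes to standard output because the tool's command line redirects stdout to $output; a real implementation would also read the reasoner name and the selected axiom types from the remaining command-line arguments.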