comparison: segmentanytree.xml @ 0:6f80b2dfb7d4 (draft, default, tip)
planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/3dtrees_segmentanytree commit 7540181d97a24b5bf07dde308f6d79bee5f18c9c
| author | bgruening |
|---|---|
| date | Thu, 30 Oct 2025 14:37:55 +0000 |
| parents | |
| children | |
comparing -1:000000000000 (null revision) with 0:6f80b2dfb7d4
<tool id="3dtrees_segmentanytree" name="3Dtrees: SegmentAnyTree" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="24.2">
    <description>
        Forest instance segmentation.
    </description>
    <macros>
        <token name="@TOOL_VERSION@">1.1.0</token>
        <token name="@VERSION_SUFFIX@">0</token>
    </macros>
    <requirements>
        <container type="docker">
            ghcr.io/3dtrees-earth/3dtrees_sat:@TOOL_VERSION@
        </container>
    </requirements>
    <command detect_errors="exit_code"><![CDATA[
        export NUMBA_CACHE_DIR=\$TMPDIR &&
        #if $input.ext == "zip":
            ln -s '$input' input.zip &&
            python3.8 -u /src/run.py
                --dataset-path input.zip
                --output-dir .
                --log_file '$log_file' &&
            mv processed_files.zip '$output'
            #if $log_file:
                && mv resource_usage.log '$resource_usage'
            #end if
        #else:
            ln -s '$input' input.laz &&
            python3.8 -u /src/run.py
                --dataset-path input.laz
                --output-dir .
                --log_file '$log_file' &&
            mv segmented_pc.laz '$output'
            #if $log_file:
                && mv resource_usage.log '$resource_usage'
            #end if
        #end if
    ]]>
    </command>
    <inputs>
        <param argument="--input" type="data" format="zip,laz" label="Input Dataset" help="ZIP file containing point cloud data in the required folder structure, or a single LAZ/LAS point cloud file."/>
        <param argument="--log_file" type="boolean" label="Resource log" help="If enabled, the tool also returns a log file with CPU, RAM, and GPU usage statistics."/>
    </inputs>
    <outputs>
        <data name="output" format="zip" label="Processed Files">
            <change_format>
                <when input="input.ext" value="laz" format="laz"/>
            </change_format>
        </data>
        <data name="resource_usage" format="txt" label="Resource Usage">
            <filter>log_file</filter>
        </data>
    </outputs>
    <tests>
        <test expect_num_outputs="2">
            <param name="input" value="prepared_files_mikro.zip"/>
            <param name="log_file" value="true"/>
            <output name="resource_usage">
                <assert_contents>
                    <has_line line="timestamp,cpu_percent,cpu_cores_used,cpu_cores_total,mem_used_mb,mem_total_mb,gpu_mem_used_mb,gpu_mem_total_mb" n="1"/>
                </assert_contents>
            </output>
            <assert_stdout>
                <has_text text="Loading checkpoint from /src/SegmentAnyTree/model_file/PointGroup-PAPER.pt"/>
                <has_text text="Segmentation complete"/>
            </assert_stdout>
        </test>
        <test expect_exit_code="1" expect_failure="true">
            <param name="input" value="mikro.laz"/>
            <param name="log_file" value="false"/>
            <assert_stderr>
                <has_text text="RuntimeError: Found no NVIDIA driver"/>
            </assert_stderr>
            <assert_stdout>
                <has_text text="Loading checkpoint from /src/SegmentAnyTree/model_file/PointGroup-PAPER.pt"/>
            </assert_stdout>
        </test>
    </tests>
    <help format="markdown">
**What it does**

This tool performs deep learning-based tree segmentation on LiDAR point clouds using the **SegmentAnyTree** algorithm (Wielgosz et al., 2024). It is sensor- and platform-agnostic, working across airborne (ALS/ULS), terrestrial (TLS), and mobile (MLS) laser scanning data.

-----

**Input**

- A **ZIP file** containing the following directory structure (a packaging sketch follows this list):

        00_original/
            input.laz
        01_subsampled/
            input_subsampled.laz
        02_input_SAT/
            tile_1.laz
            tile_2.laz
            ...

- A single **LAZ/LAS file** containing a point cloud
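
The expected ZIP layout could, for example, be assembled with a short Python script. The following is a minimal sketch using only the standard `zipfile` module; all file names are placeholders, and the subsampled and tiled files are assumed to already exist (e.g. produced by the Tile and Merge tool):

```python
# Hypothetical helper: package pre-existing LAZ files into the folder layout
# expected by SegmentAnyTree. File names are placeholders.
import zipfile
from pathlib import Path

def build_input_zip(original, subsampled, tiles, out_zip):
    """Write a ZIP with the 00_original / 01_subsampled / 02_input_SAT layout."""
    # LAZ files are already compressed, so store them without re-compression.
    with zipfile.ZipFile(out_zip, "w", compression=zipfile.ZIP_STORED) as zf:
        zf.write(original, f"00_original/{Path(original).name}")
        zf.write(subsampled, f"01_subsampled/{Path(subsampled).name}")
        for tile in tiles:
            zf.write(tile, f"02_input_SAT/{Path(tile).name}")

build_input_zip(
    "input.laz",
    "input_subsampled.laz",
    ["tile_1.laz", "tile_2.laz"],
    "prepared_files.zip",
)
```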

-----

**Output**

The output depends on the input type:
- **ZIP input** → ZIP output containing the complete processed folder structure.
- **LAZ input** → Single LAZ output file `segmented_pc.laz` containing the segmented point cloud with tree instance IDs (see the inspection sketch after this list).
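
A minimal sketch for inspecting the segmented LAZ output is shown below. It assumes `laspy` (with a LAZ backend such as `lazrs`) is installed; the name of the instance-ID dimension used here is a placeholder, so list the available dimensions first:

```python
# Hypothetical inspection script for segmented_pc.laz.
import numpy as np
import laspy

las = laspy.read("segmented_pc.laz")
# Print all point dimensions to find the one holding the tree instance IDs.
print(list(las.point_format.dimension_names))

instance_dim = "instance_nr"  # placeholder name; replace with the real dimension
ids = np.asarray(las[instance_dim])
# Assumption: non-positive IDs mark points that were not assigned to a tree.
tree_ids = np.unique(ids[ids > 0])
print(f"{tree_ids.size} tree instances found")
```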

We recommend using the [3DTrees: Tile and Merge](https://usegalaxy.eu/?tool_id=toolshed.g2.bx.psu.edu%2Frepos%2Fbgruening%2F3dtrees_tile_merge%2F3dtrees_tile_merge%2F1.0.1%2Bgalaxy0&version=latest) tool to preprocess the input point cloud and to remap the segmented point cloud back to its original resolution afterwards.

    </help>
    <creator>
        <person name="Maciej Wielgosz" email="maciej.wielgosz@nibio.no" url="https://maciej.wielgosz.info/"/>
        <person name="Kilian Gerberding" email="kilian.gerberding@geosense.uni-freiburg.de" url="https://orcid.org/0009-0002-5001-2571"/>
        <organization name="3Dtrees-Team, University of Freiburg" url="https://github.com/3dTrees-earth"/>
    </creator>
    <citations>
        <citation type="doi">10.1016/j.rse.2024.114367</citation>
        <citation type="bibtex">
            @misc{3dtrees_segmentanytree, title = {3Dtrees: SegmentAnyTree}, author = {3Dtrees-Project}, year = {2025}}
        </citation>
    </citations>
</tool>
