changeset 0:02e4bb4fa10c draft

planemo upload for repository https://github.com/esg-epfl-apc/tools-astro/tree/main/tools commit 2991f65b25d4e6d1b69458603fce917adff40f94
author astroteam
date Mon, 19 Feb 2024 10:56:44 +0000
parents
children 593c4b45eda5
files Image.py Lightcurve.py Spectrum.py Spectrum_gammapy.py hess_astro_tool.xml
diffstat 5 files changed, 1105 insertions(+), 0 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Image.py	Mon Feb 19 10:56:44 2024 +0000
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import matplotlib.pyplot as plt
+import numpy as np
+from astropy import wcs
+from astropy.coordinates import SkyCoord
+from astropy.io import fits
+from astropy.time import Time
+from numpy import cos, pi
+from oda_api.data_products import ImageDataProduct, PictureProduct
+from oda_api.json import CustomJSONEncoder
+
+if not os.path.exists("hess_dl3_dr1.tar.gz"):
+    get_ipython().system(   # noqa: F821
+        "wget https://zenodo.org/record/1421099/files/hess_dl3_dr1.tar.gz"
+    )
+    get_ipython().system("tar -zxvf hess_dl3_dr1.tar.gz")   # noqa: F821
+
+src_name = "Crab"  # http://odahub.io/ontology#AstrophysicalObject
+RA = 83.628700  # http://odahub.io/ontology#PointOfInterestRA
+DEC = 22.014700  # http://odahub.io/ontology#PointOfInterestDEC
+T1 = "2000-10-09T13:16:00.0"  # http://odahub.io/ontology#StartTime
+T2 = "2022-10-10T13:16:00.0"  # http://odahub.io/ontology#EndTime
+Radius = 2.5  # http://odahub.io/ontology#AngleDegrees
+pixsize = (
+    0.1  # http://odahub.io/ontology#AngleDegrees ; oda:label "Pixel size"
+)
+Emin = 100.0  # http://odahub.io/ontology#Energy_GeV
+Emax = 10000.0  # http://odahub.io/ontology#Energy_GeV
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic.keys():
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
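+# Override the default parameter values above with those supplied by Galaxy in
+# inputs.json, casting each to the type of its default.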
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+T1 = Time(T1, format="isot", scale="utc").mjd
+T2 = Time(T2, format="isot", scale="utc").mjd
+message = ""
+RA_pnts = []
+DEC_pnts = []
+DL3_files = []
+OBSIDs = []
+Tstart = []
+Tstop = []
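+# Scan the DL3 event files and collect, for each run, the pointing direction and
+# the observation start/stop times from the FITS headers.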
+flist = os.listdir("data")
+for f in flist:
+    if f[-7:] == "fits.gz":
+        DL3_files.append(f)
+        OBSIDs.append(int(f[20:26]))
+        hdul = fits.open("data/" + f)
+        RA_pnts.append(float(hdul[1].header["RA_PNT"]))
+        DEC_pnts.append(float(hdul[1].header["DEC_PNT"]))
+        Tstart.append(
+            Time(
+                hdul[1].header["DATE-OBS"] + "T" + hdul[1].header["TIME-OBS"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        Tstop.append(
+            Time(
+                hdul[1].header["DATE-END"] + "T" + hdul[1].header["TIME-END"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        hdul.close()
+
+Coords_s = SkyCoord(RA, DEC, unit="degree")
+COORDS_pnts = SkyCoord(RA_pnts, DEC_pnts, unit="degree")
+seps = COORDS_pnts.separation(Coords_s).deg
+
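+# Keep only runs whose pointing lies within Radius of the source and whose time
+# span falls inside the requested [T1, T2] interval.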
+mask = np.where((seps < Radius) & (Tstart > T1) & (Tstop < T2))[0]
+OBSlist = []
+for i in mask:
+    OBSlist.append(DL3_files[i])
+if len(OBSlist) == 0:
+    message = "No data found"
+    raise RuntimeError("No data found")
+message
+
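+# Build the count map: histogram the events of the selected runs on an RA/DEC grid;
+# the RA bin width is scaled by 1/cos(DEC) so that bins subtend roughly equal angles.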
+cdec = cos(DEC * pi / 180.0)
+Npix = int(4 * Radius / pixsize) + 1
+RA_bins = np.linspace(RA - Radius / cdec, RA + Radius / cdec, Npix + 1)
+DEC_bins = np.linspace(DEC - Radius, DEC + Radius, Npix + 1)
+image = np.zeros((Npix, Npix))
+for f in OBSlist:
+    hdul = fits.open("data/" + f)
+    ev = hdul["EVENTS"].data
+    ev_ra = ev["RA"]
+    ev_dec = ev["DEC"]
+    ev_en = ev["ENERGY"]
+    ev_time = ev["TIME"]
+    h = np.histogram2d(ev_ra, ev_dec, bins=[RA_bins, DEC_bins])
+    image += h[0]
+    hdul.close()
+
+plt.imshow(
+    np.flip(image.T, axis=1),
+    extent=(RA_bins[-1], RA_bins[0], DEC_bins[0], DEC_bins[-1]),
+    origin="lower",
+)
+plt.colorbar()
+
+plt.xlabel("RA, degrees")
+plt.ylabel("DEC,degrees")
+plt.savefig("Image.png", format="png")
+
+# Create a new WCS object.  The number of axes must be set
+# from the start
+w = wcs.WCS(naxis=2)
+
+# Set up an "Airy's zenithal" projection
+# Vector properties may be set with Python lists, or Numpy arrays
+w.wcs.crpix = [Npix / 2.0, Npix / 2.0]
+w.wcs.cdelt = np.array([pixsize / cdec, pixsize])
+w.wcs.crval = [RA, DEC]
+w.wcs.ctype = ["RA---AIR", "DEC--AIR"]
+w.wcs.set_pv([(2, 1, 45.0)])
+
+# Now, write out the WCS object as a FITS header
+header = w.to_header()
+
+# header is an astropy.io.fits.Header object.  We can use it to create a new
+# PrimaryHDU and write it to a file.
+hdu = fits.PrimaryHDU(image, header=header)
+hdu.writeto("Image.fits", overwrite=True)
+hdul_im = fits.open("Image.fits")
+im = hdul_im[0].data
+im_wcs = wcs.WCS(hdul_im[0].header)
+plt.subplot(projection=im_wcs)
+plt.imshow(im, origin="lower")
+plt.grid(color="white", ls="solid")
+plt.xlabel("RA")
+plt.ylabel("Dec")
+
+bin_image = PictureProduct.from_file("Image.png")
+fits_image = ImageDataProduct.from_fits_file("Image.fits")
+
+picture = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+image = fits_image  # http://odahub.io/ontology#Image
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(("out_Image_picture", "picture_galaxy.output", picture))
+_oda_outs.append(("out_Image_image", "image_galaxy.output", image))
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Lightcurve.py	Mon Feb 19 10:56:44 2024 +0000
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import matplotlib.pyplot as plt
+import numpy as np
+from astropy.coordinates import SkyCoord
+from astropy.io import fits
+from astropy.time import Time
+from numpy import sqrt
+from oda_api.data_products import ODAAstropyTable, PictureProduct
+from oda_api.json import CustomJSONEncoder
+
+if not os.path.exists("hess_dl3_dr1.tar.gz"):
+    get_ipython().system(   # noqa: F821
+        "wget https://zenodo.org/record/1421099/files/hess_dl3_dr1.tar.gz"
+    )
+    get_ipython().system("tar -zxvf hess_dl3_dr1.tar.gz")   # noqa: F821
+
+src_name = "Crab"  # http://odahub.io/ontology#AstrophysicalObject
+RA = 83.628700  # http://odahub.io/ontology#PointOfInterestRA
+DEC = 22.014700  # http://odahub.io/ontology#PointOfInterestDEC
+T1 = "2003-10-09T13:16:00.0"  # http://odahub.io/ontology#StartTime
+T2 = "2005-10-10T13:16:00.0"  # http://odahub.io/ontology#EndTime
+Radius = 2.5  # http://odahub.io/ontology#AngleDegrees
+R_s = 0.2  # http://odahub.io/ontology#AngleDegrees
+Emin = 100.0  # http://odahub.io/ontology#Energy_GeV
+Emax = 10000.0  # http://odahub.io/ontology#Energy_GeV
+NTbins = 10  # http://odahub.io/ontology#Integer
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic.keys():
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+T1 = Time(T1, format="isot", scale="utc").mjd
+T2 = Time(T2, format="isot", scale="utc").mjd
+message = ""
+RA_pnts = []
+DEC_pnts = []
+DL3_files = []
+OBSIDs = []
+Tstart = []
+Tstop = []
+flist = os.listdir("data")
+for f in flist:
+    if f[-7:] == "fits.gz":
+        DL3_files.append(f)
+        OBSIDs.append(int(f[20:26]))
+        hdul = fits.open("data/" + f)
+        RA_pnts.append(float(hdul[1].header["RA_PNT"]))
+        DEC_pnts.append(float(hdul[1].header["DEC_PNT"]))
+        Tstart.append(
+            Time(
+                hdul[1].header["DATE-OBS"] + "T" + hdul[1].header["TIME-OBS"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        Tstop.append(
+            Time(
+                hdul[1].header["DATE-END"] + "T" + hdul[1].header["TIME-END"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        hdul.close()
+
+Coords_s = SkyCoord(RA, DEC, unit="degree")
+COORDS_pnts = SkyCoord(RA_pnts, DEC_pnts, unit="degree")
+seps = COORDS_pnts.separation(Coords_s).deg
+
+mask = np.where((seps < Radius) & (Tstart > T1) & (Tstop < T2))[0]
+OBSlist = []
+Tbegs = []
+for i in mask:
+    OBSlist.append(DL3_files[i])
+    Tbegs.append(Tstart[i])
+if len(OBSlist) == 0:
+    message = "No data found"
+    raise RuntimeError("No data found")
+message
+
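+# Define NTbins equal-width time bins (in MJD) spanning the requested interval.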
+Tbins = np.linspace(T1, T2, NTbins + 1)
+Tmin = Tbins[:-1]
+Tmax = Tbins[1:]
+Tmean = (Tmin + Tmax) / 2.0
+Tbins
+
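+# Loop over the selected runs: count events in a circle of radius R_s around the
+# source ("ON") and in a mirror region reflected through the pointing direction
+# ("OFF"), then convert the counts to flux by dividing by the effective area (from
+# the AEFF extension, at the offset of the source) times the livetime.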
+flux = np.zeros(NTbins)
+flux_err = np.zeros(NTbins)
+flux_b = np.zeros(NTbins)
+flux_b_err = np.zeros(NTbins)
+Expos = np.zeros(NTbins)
+for count, f in enumerate(OBSlist):
+    hdul = fits.open("data/" + f)
+    RA_pnt = hdul[1].header["RA_PNT"]
+    DEC_pnt = hdul[1].header["DEC_PNT"]
+    Texp = hdul[1].header["LIVETIME"]
+    Trun_start = hdul[1].header["TSTART"]
+    dRA = RA - RA_pnt
+    dDEC = DEC - DEC_pnt
+    RA_b = RA_pnt - dRA
+    DEC_b = DEC_pnt - dDEC
+    Coords_b = SkyCoord(RA_b, DEC_b, unit="degree")
+    Coords_pnt = SkyCoord(RA_pnt, DEC_pnt, unit="degree")
+    dist = Coords_pnt.separation(Coords_s).deg
+
+    ev = hdul["EVENTS"].data
+    ev_ra = ev["RA"]
+    ev_dec = ev["DEC"]
+    ev_en = ev["ENERGY"]
+    ev_time = (ev["TIME"] - Trun_start) / 86400.0 + Tbegs[count]
+    print(ev_time[0])
+    ev_coords = SkyCoord(ev_ra, ev_dec, unit="degree")
+    sep_s = ev_coords.separation(Coords_s).deg
+    sep_b = ev_coords.separation(Coords_b).deg
+
+    hdu = hdul["AEFF"].data
+    EEmin = hdu["ENERG_LO"][0]
+    EEmax = hdu["ENERG_HI"][0]
+    EE = sqrt(EEmin * EEmax)
+    EEbins = np.concatenate((EEmin, [EEmax[-1]]))
+    AA = hdu["EFFAREA"][0] + 1e-10
+    Thmin = hdu["THETA_LO"][0]
+    Thmax = hdu["THETA_HI"][0]
+    ind = np.argmin((Thmin - dist) ** 2)
+    AA = AA[ind] * Texp * 1e4
+    mask = sep_s < R_s
+    cts1 = np.histogram2d(ev_time[mask], ev_en[mask], bins=[Tbins, EEbins])[0]
+    mask = sep_b < R_s
+    cts2 = np.histogram2d(ev_time[mask], ev_en[mask], bins=[Tbins, EEbins])[0]
+    src = cts1 - cts2
+    src_err = sqrt(cts1 + cts2)
+    flux += np.sum(src / AA, axis=1)
+    flux_err += np.sum(src_err / AA, axis=1)
+    flux_b += np.sum(cts2 / AA, axis=1)
+    flux_b_err += np.sum(sqrt(cts2) / AA, axis=1)
+    hdul.close()
+
+if message == "":
+    plt.errorbar(
+        Tmean,
+        flux,
+        yerr=flux_err,
+        xerr=[Tmean - Tmin, Tmax - Tmean],
+        linestyle="none",
+        label="source",
+    )
+    plt.errorbar(
+        Tmean,
+        flux_b,
+        yerr=flux_b_err,
+        xerr=[Tmean - Tmin, Tmax - Tmean],
+        linestyle="none",
+        label="background",
+    )
+    plt.xlabel("Time, MJD")
+    plt.ylabel("Flux, cts/cm$^2$s")
+    plt.yscale("log")
+    ymin = min(min(flux - flux_err), min(flux_b - flux_b_err))
+    ymax = max(max(flux + flux_err), max(flux_b + flux_b_err))
+    plt.ylim(ymin / 2.0, 2 * ymax)
+    plt.legend(loc="lower left")
+    plt.savefig("Lightcurve.png", format="png")
+
+if message == "":
+    bin_image = PictureProduct.from_file("Lightcurve.png")
+    from astropy.table import Table
+
+    data = [Tmean, Tmin, Tmax, flux, flux_err, flux_b, flux_b_err]
+    names = (
+        "Tmean[MJD]",
+        "Tmin[MJD]",
+        "Tmax[MJD]",
+        "Flux[counts/cm2s]",
+        "Flux_error[counts/cm2s]",
+        "Background[counts/cm2s]",
+        "Background_error[counts/cm2s]",
+    )
+    lc = ODAAstropyTable(Table(data, names=names))
+
+picture = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+lightcurve_astropy_table = lc  # http://odahub.io/ontology#ODAAstropyTable
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(("out_Lightcurve_picture", "picture_galaxy.output", picture))
+_oda_outs.append(
+    (
+        "out_Lightcurve_lightcurve_astropy_table",
+        "lightcurve_astropy_table_galaxy.output",
+        lightcurve_astropy_table,
+    )
+)
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Spectrum.py	Mon Feb 19 10:56:44 2024 +0000
@@ -0,0 +1,209 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import matplotlib.pyplot as plt
+import numpy as np
+from astropy.coordinates import SkyCoord
+from astropy.io import fits
+from astropy.time import Time
+from numpy import log10, sqrt
+from oda_api.data_products import ODAAstropyTable, PictureProduct
+from oda_api.json import CustomJSONEncoder
+
+if not os.path.exists("hess_dl3_dr1.tar.gz"):
+    get_ipython().system(   # noqa: F821
+        "wget https://zenodo.org/record/1421099/files/hess_dl3_dr1.tar.gz"
+    )
+    get_ipython().system("tar -zxvf hess_dl3_dr1.tar.gz")   # noqa: F821
+
+# src_name='Crab' #http://odahub.io/ontology#AstrophysicalObject
+# RA = 83.628700  # http://odahub.io/ontology#PointOfInterestRA
+# DEC = 22.014700 # http://odahub.io/ontology#PointOfInterestDEC
+src_name = "PKS 2155-304"
+RA = 329.716938  # http://odahub.io/ontology#PointOfInterestRA
+DEC = -30.225588  # http://odahub.io/ontology#PointOfInterestDEC
+
+T1 = "2000-10-09T13:16:00.0"  # http://odahub.io/ontology#StartTime
+T2 = "2022-10-10T13:16:00.0"  # http://odahub.io/ontology#EndTime
+Radius = 2.5  # http://odahub.io/ontology#AngleDegrees
+R_s = 0.2  # http://odahub.io/ontology#AngleDegrees
+
+Emin = 100.0  # http://odahub.io/ontology#Energy_GeV
+Emax = 10000.0  # http://odahub.io/ontology#Energy_GeV
+NEbins = 20  # http://odahub.io/ontology#Integer
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic.keys():
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+Emin = Emin / 1e3
+Emax = Emax / 1e3
+Ebins = np.logspace(log10(Emin), log10(Emax), NEbins + 1)
+Ebins
+Emin = Ebins[:-1]
+Emax = Ebins[1:]
+Emean = sqrt(Emin * Emax)
+lgEmean = log10(Emean)
+
+T1 = Time(T1, format="isot", scale="utc").mjd
+T2 = Time(T2, format="isot", scale="utc").mjd
+message = ""
+RA_pnts = []
+DEC_pnts = []
+DL3_files = []
+OBSIDs = []
+Tstart = []
+Tstop = []
+flist = os.listdir("data")
+for f in flist:
+    if f[-7:] == "fits.gz":
+        DL3_files.append(f)
+        OBSIDs.append(int(f[20:26]))
+        hdul = fits.open("data/" + f)
+        RA_pnts.append(float(hdul[1].header["RA_PNT"]))
+        DEC_pnts.append(float(hdul[1].header["DEC_PNT"]))
+        Tstart.append(
+            Time(
+                hdul[1].header["DATE-OBS"] + "T" + hdul[1].header["TIME-OBS"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        Tstop.append(
+            Time(
+                hdul[1].header["DATE-END"] + "T" + hdul[1].header["TIME-END"],
+                format="isot",
+                scale="utc",
+            ).mjd
+        )
+        hdul.close()
+
+Coords_s = SkyCoord(RA, DEC, unit="degree")
+COORDS_pnts = SkyCoord(RA_pnts, DEC_pnts, unit="degree")
+seps = COORDS_pnts.separation(Coords_s).deg
+
+mask = np.where((seps < Radius) & (Tstart > T1) & (Tstop < T2))[0]
+OBSlist = []
+for i in mask:
+    OBSlist.append(DL3_files[i])
+if len(OBSlist) == 0:
+    message = "No data found"
+    raise RuntimeError("No data found")
+message
+
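+# Accumulate ON/OFF counts per energy bin over the selected runs. The OFF region
+# mirrors the source position with respect to the pointing direction; the exposure
+# is the effective area (interpolated in log-energy at the source offset) times the
+# livetime of each run.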
+cts_s = np.zeros(NEbins)
+cts_b = np.zeros(NEbins)
+Expos = np.zeros(NEbins)
+for f in OBSlist:
+    hdul = fits.open("data/" + f)
+    RA_pnt = hdul[1].header["RA_PNT"]
+    DEC_pnt = hdul[1].header["DEC_PNT"]
+    Texp = hdul[1].header["LIVETIME"]
+    dRA = RA - RA_pnt
+    dDEC = DEC - DEC_pnt
+    RA_b = RA_pnt - dRA
+    DEC_b = DEC_pnt - dDEC
+    Coords_b = SkyCoord(RA_b, DEC_b, unit="degree")
+    Coords_pnt = SkyCoord(RA_pnt, DEC_pnt, unit="degree")
+    dist = Coords_pnt.separation(Coords_s).deg
+
+    ev = hdul["EVENTS"].data
+    ev_ra = ev["RA"]
+    ev_dec = ev["DEC"]
+    ev_en = ev["ENERGY"]
+    ev_time = ev["TIME"]
+    ev_coords = SkyCoord(ev_ra, ev_dec, unit="degree")
+    sep_s = ev_coords.separation(Coords_s).deg
+    sep_b = ev_coords.separation(Coords_b).deg
+
+    hdu = hdul["AEFF"].data
+    EEmin = hdu["ENERG_LO"][0]
+    EEmax = hdu["ENERG_HI"][0]
+    lgEE = log10(sqrt(EEmin * EEmax))
+    lgAA = log10(hdu["EFFAREA"][0] + 1e-10)
+    Thmin = hdu["THETA_LO"][0]
+    Thmax = hdu["THETA_HI"][0]
+    ind = np.argmin((Thmin - dist) ** 2)
+    Expos += 10 ** (np.interp(lgEmean, lgEE, lgAA[ind])) * Texp
+    mask = sep_s < R_s
+    cts_s += np.histogram(ev_en[mask], bins=Ebins)[0]
+    mask = sep_b < R_s
+    cts_b += np.histogram(ev_en[mask], bins=Ebins)[0]
+    hdul.close()
+
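+# Convert net counts to E^2 dN/dE: divide by the energy bin width and the exposure
+# (effective area in m^2 converted to cm^2) and multiply by Emean^2 = Emin * Emax.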
+flux = (cts_s - cts_b) / (Emax - Emin) * Emax * Emin / (Expos * 1e4)
+flux_err = sqrt(cts_s + cts_b) / (Emax - Emin) * Emax * Emin / (Expos * 1e4)
+plt.errorbar(Emean, flux, yerr=flux_err, xerr=[Emean - Emin, Emax - Emean])
+plt.xscale("log")
+plt.yscale("log")
+plt.xlabel("$E$, TeV")
+plt.ylabel("$E^2 dN/dE$, erg/(cm$^2$s)")
+plt.savefig("Spectrum.png", format="png")
+
+bin_image = PictureProduct.from_file("Spectrum.png")
+from astropy.table import Table
+
+data = [Emean, Emin, Emax, flux, flux_err, cts_s, cts_b, Expos * 1e4]
+names = (
+    "Emean[TeV]",
+    "Emin[TeV]",
+    "Emax[TeV]",
+    "Flux[TeV/cm2s]",
+    "Flux_error[TeV/cm2s]",
+    "Cts_s",
+    "Cts_b",
+    "Exposure[cm2s]",
+)
+spec = ODAAstropyTable(Table(data, names=names))
+
+picture_png = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+spectrum_astropy_table = spec  # http://odahub.io/ontology#ODAAstropyTable
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(
+    ("out_Spectrum_picture_png", "picture_png_galaxy.output", picture_png)
+)
+_oda_outs.append(
+    (
+        "out_Spectrum_spectrum_astropy_table",
+        "spectrum_astropy_table_galaxy.output",
+        spectrum_astropy_table,
+    )
+)
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/Spectrum_gammapy.py	Mon Feb 19 10:56:44 2024 +0000
@@ -0,0 +1,266 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import astropy.units as u
+import matplotlib.pyplot as plt
+import numpy as np
+from astropy.coordinates import Angle, SkyCoord
+from astropy.time import Time
+from gammapy.data import DataStore
+from gammapy.datasets import (
+    Datasets,
+    FluxPointsDataset,
+    SpectrumDataset,
+    SpectrumDatasetOnOff,
+)
+from gammapy.makers import (
+    ReflectedRegionsBackgroundMaker,
+    SafeMaskMaker,
+    SpectrumDatasetMaker,
+)
+
+# from gammapy.makers.utils import make_theta_squared_table
+from gammapy.maps import MapAxis, RegionGeom, WcsGeom
+from oda_api.data_products import ODAAstropyTable, PictureProduct
+from oda_api.json import CustomJSONEncoder
+from regions import CircleSkyRegion
+
+hess_data = "gammapy-datasets/1.1/hess-dl3-dr1/"
+if not os.path.exists(hess_data):
+    get_ipython().system("gammapy download datasets")   # noqa: F821
+
+data_store = DataStore.from_dir(hess_data)
+
+# src_name='Crab' #http://odahub.io/ontology#AstrophysicalObject
+# RA = 83.628700  # http://odahub.io/ontology#PointOfInterestRA
+# DEC = 22.014700 # http://odahub.io/ontology#PointOfInterestDEC
+src_name = "PKS 2155-304"
+RA = 329.716938  # http://odahub.io/ontology#PointOfInterestRA
+DEC = -30.225588  # http://odahub.io/ontology#PointOfInterestDEC
+T1 = "2000-10-09T13:16:00.0"  # http://odahub.io/ontology#StartTime
+T2 = "2022-10-10T13:16:00.0"  # http://odahub.io/ontology#EndTime
+Radius = 2.5  # http://odahub.io/ontology#AngleDegrees
+R_s = 0.5  # http://odahub.io/ontology#AngleDegrees
+
+Emin = 100.0  # http://odahub.io/ontology#Energy_GeV
+Emax = 10000.0  # http://odahub.io/ontology#Energy_GeV
+NEbins = 20  # http://odahub.io/ontology#Integer
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic.keys():
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+T1 = Time(T1, format="isot", scale="utc").mjd
+T2 = Time(T2, format="isot", scale="utc").mjd
+
+dates1 = data_store.obs_table["DATE-OBS"]
+dates2 = data_store.obs_table["DATE-END"]
+times1 = data_store.obs_table["TIME-OBS"]
+times2 = data_store.obs_table["TIME-END"]
+OBSIDs = data_store.obs_table["OBS_ID"]
+Tstart = []
+Tstop = []
+for i in range(len(dates1)):
+    Tstart.append(
+        Time(dates1[i] + "T" + times1[i], format="isot", scale="utc").mjd
+    )
+    Tstop.append(
+        Time(dates2[i] + "T" + times2[i], format="isot", scale="utc").mjd
+    )
+
+RA_pnts = np.array(data_store.obs_table["RA_PNT"])
+DEC_pnts = np.array(data_store.obs_table["DEC_PNT"])
+
+Coords_s = SkyCoord(RA, DEC, unit="degree")
+COORDS_pnts = SkyCoord(RA_pnts, DEC_pnts, unit="degree")
+seps = COORDS_pnts.separation(Coords_s).deg
+
+mask = np.where((seps < Radius) & (Tstart > T1) & (Tstop < T2))[0]
+OBSlist = []
+obs_ids = OBSIDs[mask]
+if len(obs_ids) == 0:
+    message = "No data found"
+    raise RuntimeError("No data found")
+obs_ids
+
+observations = data_store.get_observations(obs_ids)
+
+target_position = Coords_s
+on_region_radius = Angle(str(R_s) + " deg")
+on_region = CircleSkyRegion(center=target_position, radius=on_region_radius)
+skydir = target_position.galactic
+geom = WcsGeom.create(
+    npix=(150, 150), binsz=0.05, skydir=skydir, proj="TAN", frame="icrs"
+)
+
+
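+# Reconstructed and true energy axes for the spectrum dataset (Emin and Emax are
+# given in GeV and converted to TeV for gammapy).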
+energy_axis = MapAxis.from_energy_bounds(
+    Emin * 1e-3,
+    Emax * 1e-3,
+    nbin=NEbins,
+    per_decade=True,
+    unit="TeV",
+    name="energy",
+)
+energy_axis_true = MapAxis.from_energy_bounds(
+    0.05, 100, nbin=20, per_decade=True, unit="TeV", name="energy_true"
+)
+
+geom = RegionGeom.create(region=on_region, axes=[energy_axis])
+dataset_empty = SpectrumDataset.create(
+    geom=geom, energy_axis_true=energy_axis_true
+)
+
+dataset_maker = SpectrumDatasetMaker(
+    containment_correction=True, selection=["counts", "exposure", "edisp"]
+)
+bkg_maker = ReflectedRegionsBackgroundMaker()
+safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
+
+datasets = Datasets()
+
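+# For each observation, build the ON spectrum and estimate the background from
+# reflected regions.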
+for obs_id, observation in zip(obs_ids, observations):
+    dataset = dataset_maker.run(
+        dataset_empty.copy(name=str(obs_id)), observation
+    )
+    dataset_on_off = bkg_maker.run(dataset, observation)
+    # dataset_on_off = safe_mask_masker.run(dataset_on_off, observation)
+    datasets.append(dataset_on_off)
+
+print(datasets)
+
+from pathlib import Path
+
+path = Path("spectrum_analysis")
+path.mkdir(exist_ok=True)
+
+for dataset in datasets:
+    dataset.write(
+        filename=path / f"obs_{dataset.name}.fits.gz", overwrite=True
+    )
+
+datasets = Datasets()
+
+for obs_id in obs_ids:
+    filename = path / f"obs_{obs_id}.fits.gz"
+    datasets.append(SpectrumDatasetOnOff.read(filename))
+
+from gammapy.modeling import Fit
+from gammapy.modeling.models import ExpCutoffPowerLawSpectralModel, SkyModel
+
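+# Jointly fit an exponential cut-off power-law model to all ON/OFF datasets.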
+spectral_model = ExpCutoffPowerLawSpectralModel(
+    amplitude=1e-12 * u.Unit("cm-2 s-1 TeV-1"),
+    index=2,
+    lambda_=0.1 * u.Unit("TeV-1"),
+    reference=1 * u.TeV,
+)
+model = SkyModel(spectral_model=spectral_model, name="crab")
+
+datasets.models = [model]
+
+fit_joint = Fit()
+result_joint = fit_joint.run(datasets=datasets)
+
+# we make a copy here to compare it later
+model_best_joint = model.copy()
+
+print(result_joint)
+
+display(result_joint.models.to_parameters_table())   # noqa: F821
+
+e_min, e_max = Emin * 1e-3, Emax * 1e-3
+energy_edges = np.geomspace(e_min, e_max, NEbins) * u.TeV
+
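+# Estimate flux points on a logarithmic energy grid between Emin and Emax and plot
+# them together with the best-fit model.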
+from gammapy.estimators import FluxPointsEstimator
+
+fpe = FluxPointsEstimator(
+    energy_edges=energy_edges, source="crab", selection_optional="all"
+)
+flux_points = fpe.run(datasets=datasets)
+
+flux_points_dataset = FluxPointsDataset(
+    data=flux_points, models=model_best_joint
+)
+flux_points_dataset.plot_fit()
+# plt.show()
+plt.savefig("Spectrum.png", format="png", bbox_inches="tight")
+
+res = flux_points.to_table(sed_type="dnde", formatted=True)
+np.array(res["dnde"])
+
+bin_image = PictureProduct.from_file("Spectrum.png")
+from astropy.table import Table
+
+Emean = np.array(res["e_ref"])
+Emin = np.array(res["e_min"])
+Emax = np.array(res["e_max"])
+flux = Emean**2 * np.array(res["dnde"])
+flux_err = Emean**2 * np.array(res["dnde_err"])
+data = [Emean, Emin, Emax, flux, flux_err]
+names = (
+    "Emean[TeV]",
+    "Emin[TeV]",
+    "Emax[TeV]",
+    "Flux[TeV/cm2s]",
+    "Flux_error[TeV/cm2s]",
+)
+spec = ODAAstropyTable(Table(data, names=names))
+
+picture_png = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+spectrum_astropy_table = spec  # http://odahub.io/ontology#ODAAstropyTable
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(
+    (
+        "out_Spectrum_gammapy_picture_png",
+        "picture_png_galaxy.output",
+        picture_png,
+    )
+)
+_oda_outs.append(
+    (
+        "out_Spectrum_gammapy_spectrum_astropy_table",
+        "spectrum_astropy_table_galaxy.output",
+        spectrum_astropy_table,
+    )
+)
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/hess_astro_tool.xml	Mon Feb 19 10:56:44 2024 +0000
@@ -0,0 +1,217 @@
+<tool id="hess_astro_tool" name="HESS" version="0.0.1+galaxy0" profile="23.0">
+  <requirements>
+    <requirement type="package" version="8.21.0">ipython</requirement>
+    <requirement type="package" version="5.3.4">astropy</requirement>
+    <requirement type="package" version="1.11.4">scipy</requirement>
+    <requirement type="package" version="1.10.13">pydantic</requirement>
+    <requirement type="package" version="4.66.2">tqdm</requirement>
+    <requirement type="package" version="1.1">gammapy</requirement>
+    <requirement type="package" version="3.8.3">matplotlib</requirement>
+    <requirement type="package" version="1.2.12">oda-api</requirement>
+    <!--Requirements string 'nb2workflow[cwl,service,rdf,mmoda]>=1.3.30 
+' can't be converted automatically. Please add the galaxy/conda requirement manually or modify the requirements file!-->
+    <requirement type="package" version="7.16.0">nbconvert</requirement>
+    <requirement type="package" version="1.20.3">wget</requirement>
+  </requirements>
+  <command detect_errors="exit_code">ipython '$__tool_directory__/${_data_product._selector}.py'</command>
+  <configfiles>
+    <inputs name="inputs" filename="inputs.json" />
+  </configfiles>
+  <inputs>
+    <conditional name="_data_product">
+      <param name="_selector" type="select" label="Data Product">
+        <option value="Image" selected="true">Image</option>
+        <option value="Spectrum" selected="false">Spectrum</option>
+        <option value="Spectrum_gammapy" selected="false">Spectrum_gammapy</option>
+        <option value="Lightcurve" selected="false">Lightcurve</option>
+      </param>
+      <when value="Image">
+        <param name="src_name" type="text" value="Crab" label="src_name" />
+        <param name="RA" type="float" value="83.6287" label="RA Units: deg" />
+        <param name="DEC" type="float" value="22.0147" label="DEC Units: deg" />
+        <param name="T1" type="text" value="2000-10-09T13:16:00.0" label="T1" />
+        <param name="T2" type="text" value="2022-10-10T13:16:00.0" label="T2" />
+        <param name="Radius" type="float" value="2.5" label="Radius Units: deg" />
+        <param name="pixsize" type="float" value="0.1" label="Pixel size Units: deg" />
+        <param name="Emin" type="float" value="100.0" label="Emin Units: GeV" />
+        <param name="Emax" type="float" value="10000.0" label="Emax Units: GeV" />
+      </when>
+      <when value="Spectrum">
+        <param name="src_name" type="text" value="PKS 2155-304" label="src_name" />
+        <param name="RA" type="float" value="329.716938" label="RA Units: deg" />
+        <param name="DEC" type="float" value="-30.225588" label="DEC Units: deg" />
+        <param name="T1" type="text" value="2000-10-09T13:16:00.0" label="T1" />
+        <param name="T2" type="text" value="2022-10-10T13:16:00.0" label="T2" />
+        <param name="Radius" type="float" value="2.5" label="Radius Units: deg" />
+        <param name="R_s" type="float" value="0.2" label="R_s Units: deg" />
+        <param name="Emin" type="float" value="100.0" label="Emin Units: GeV" />
+        <param name="Emax" type="float" value="10000.0" label="Emax Units: GeV" />
+        <param name="NEbins" type="integer" value="20" label="NEbins" />
+      </when>
+      <when value="Spectrum_gammapy">
+        <param name="src_name" type="text" value="PKS 2155-304" label="src_name" />
+        <param name="RA" type="float" value="329.716938" label="RA Units: deg" />
+        <param name="DEC" type="float" value="-30.225588" label="DEC Units: deg" />
+        <param name="T1" type="text" value="2000-10-09T13:16:00.0" label="T1" />
+        <param name="T2" type="text" value="2022-10-10T13:16:00.0" label="T2" />
+        <param name="Radius" type="float" value="2.5" label="Radius Units: deg" />
+        <param name="R_s" type="float" value="0.5" label="R_s Units: deg" />
+        <param name="Emin" type="float" value="100.0" label="Emin Units: GeV" />
+        <param name="Emax" type="float" value="10000.0" label="Emax Units: GeV" />
+        <param name="NEbins" type="integer" value="20" label="NEbins" />
+      </when>
+      <when value="Lightcurve">
+        <param name="src_name" type="text" value="Crab" label="src_name" />
+        <param name="RA" type="float" value="83.6287" label="RA Units: deg" />
+        <param name="DEC" type="float" value="22.0147" label="DEC Units: deg" />
+        <param name="T1" type="text" value="2003-10-09T13:16:00.0" label="T1" />
+        <param name="T2" type="text" value="2005-10-10T13:16:00.0" label="T2" />
+        <param name="Radius" type="float" value="2.5" label="Radius Units: deg" />
+        <param name="R_s" type="float" value="0.2" label="R_s Units: deg" />
+        <param name="Emin" type="float" value="100.0" label="Emin Units: GeV" />
+        <param name="Emax" type="float" value="10000.0" label="Emax Units: GeV" />
+        <param name="NTbins" type="integer" value="10" label="NTbins" />
+      </when>
+    </conditional>
+  </inputs>
+  <outputs>
+    <data label="${tool.name} -&gt; Image picture" name="out_Image_picture" format="auto" from_work_dir="picture_galaxy.output">
+      <filter>_data_product['_selector'] == 'Image'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Image image" name="out_Image_image" format="auto" from_work_dir="image_galaxy.output">
+      <filter>_data_product['_selector'] == 'Image'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Spectrum picture_png" name="out_Spectrum_picture_png" format="auto" from_work_dir="picture_png_galaxy.output">
+      <filter>_data_product['_selector'] == 'Spectrum'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Spectrum spectrum_astropy_table" name="out_Spectrum_spectrum_astropy_table" format="auto" from_work_dir="spectrum_astropy_table_galaxy.output">
+      <filter>_data_product['_selector'] == 'Spectrum'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Spectrum_gammapy picture_png" name="out_Spectrum_gammapy_picture_png" format="auto" from_work_dir="picture_png_galaxy.output">
+      <filter>_data_product['_selector'] == 'Spectrum_gammapy'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Spectrum_gammapy spectrum_astropy_table" name="out_Spectrum_gammapy_spectrum_astropy_table" format="auto" from_work_dir="spectrum_astropy_table_galaxy.output">
+      <filter>_data_product['_selector'] == 'Spectrum_gammapy'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Lightcurve picture" name="out_Lightcurve_picture" format="auto" from_work_dir="picture_galaxy.output">
+      <filter>_data_product['_selector'] == 'Lightcurve'</filter>
+    </data>
+    <data label="${tool.name} -&gt; Lightcurve lightcurve_astropy_table" name="out_Lightcurve_lightcurve_astropy_table" format="auto" from_work_dir="lightcurve_astropy_table_galaxy.output">
+      <filter>_data_product['_selector'] == 'Lightcurve'</filter>
+    </data>
+  </outputs>
+  <tests>
+    <test expect_num_outputs="2">
+      <conditional name="_data_product">
+        <param name="_selector" value="Image" />
+        <param name="src_name" value="Crab" />
+        <param name="RA" value="83.6287" />
+        <param name="DEC" value="22.0147" />
+        <param name="T1" value="2000-10-09T13:16:00.0" />
+        <param name="T2" value="2022-10-10T13:16:00.0" />
+        <param name="Radius" value="2.5" />
+        <param name="pixsize" value="0.1" />
+        <param name="Emin" value="100.0" />
+        <param name="Emax" value="10000.0" />
+      </conditional>
+      <assert_stdout>
+        <has_text text="*** Job finished successfully ***" />
+      </assert_stdout>
+    </test>
+    <test expect_num_outputs="2">
+      <conditional name="_data_product">
+        <param name="_selector" value="Spectrum" />
+        <param name="src_name" value="PKS 2155-304" />
+        <param name="RA" value="329.716938" />
+        <param name="DEC" value="-30.225588" />
+        <param name="T1" value="2000-10-09T13:16:00.0" />
+        <param name="T2" value="2022-10-10T13:16:00.0" />
+        <param name="Radius" value="2.5" />
+        <param name="R_s" value="0.2" />
+        <param name="Emin" value="100.0" />
+        <param name="Emax" value="10000.0" />
+        <param name="NEbins" value="20" />
+      </conditional>
+      <assert_stdout>
+        <has_text text="*** Job finished successfully ***" />
+      </assert_stdout>
+    </test>
+    <test expect_num_outputs="2">
+      <conditional name="_data_product">
+        <param name="_selector" value="Spectrum_gammapy" />
+        <param name="src_name" value="PKS 2155-304" />
+        <param name="RA" value="329.716938" />
+        <param name="DEC" value="-30.225588" />
+        <param name="T1" value="2000-10-09T13:16:00.0" />
+        <param name="T2" value="2022-10-10T13:16:00.0" />
+        <param name="Radius" value="2.5" />
+        <param name="R_s" value="0.5" />
+        <param name="Emin" value="100.0" />
+        <param name="Emax" value="10000.0" />
+        <param name="NEbins" value="20" />
+      </conditional>
+      <assert_stdout>
+        <has_text text="*** Job finished successfully ***" />
+      </assert_stdout>
+    </test>
+    <test expect_num_outputs="2">
+      <conditional name="_data_product">
+        <param name="_selector" value="Lightcurve" />
+        <param name="src_name" value="Crab" />
+        <param name="RA" value="83.6287" />
+        <param name="DEC" value="22.0147" />
+        <param name="T1" value="2003-10-09T13:16:00.0" />
+        <param name="T2" value="2005-10-10T13:16:00.0" />
+        <param name="Radius" value="2.5" />
+        <param name="R_s" value="0.2" />
+        <param name="Emin" value="100.0" />
+        <param name="Emax" value="10000.0" />
+        <param name="NTbins" value="10" />
+      </conditional>
+      <assert_stdout>
+        <has_text text="*** Job finished successfully ***" />
+      </assert_stdout>
+    </test>
+  </tests>
+  <help>This service provides analysis of publicly available sample &#8220;Data Level
+3&#8221; (DL3) data of HESS gamma-ray telescope, described by `Hess
+Collaboration (2018) &lt;https://arxiv.org/abs/1810.04516&gt;`__. Three types
+of data products are generated: sky images, source lightcurves and
+spectra.
+
+The sky images are count maps produced by histogramming the events in
+sky coordinates (Right Ascension and Declination), in the energy range
+set by the ``Emin`` and ``Emax`` parameters and in the time range set by
+the ``T1`` (start time) and ``T2`` (stop time) parameters.
+
+The lightcurves are produced by histogramming the events in time, in
+``NTbins`` time intervals of equal width between ``T1`` (start time) and
+``T2`` (stop time). The events are selected in the desired energy range
+between ``Emin`` and ``Emax`` from a circular region of radius ``R_s``
+(in degrees) around the source position ``RA``,\ ``DEC``. Conversion of
+the counts to physical flux units is done by dividing by the exposure
+time and the effective area extracted from the Instrument Response
+Functions (IRF), as sketched below.
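+
+Schematically, the per-bin conversion implemented in ``Lightcurve.py`` is
+(a sketch, not the literal code; ``counts_on`` and ``counts_off`` stand for the
+event counts in the source and mirrored background regions, ``aeff_cm2`` for
+the effective area taken from the IRF and ``livetime_s`` for the livetime of
+the run)::
+
+   flux = (counts_on - counts_off) / (aeff_cm2 * livetime_s)  # counts / (cm^2 s)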
+
+For the spectra, two alternative tools are provided. The service
+``Spectrum`` performs histogramming of the events in energy, in
+``NEbins`` energy bins homogeneously spaced in the logarithm of energy
+between ``Emin`` and ``Emax``. Conversion of the counts to physical flux
+units is done by dividing by the exposure time and the effective area
+extracted from the IRF. This method does not take the energy bias into
+account and can result in a wrong spectral shape near the low-energy
+threshold, where the bias is strongest.
+
+An alternative spectral extraction is done with
+`Gammapy &lt;https://gammapy.org/&gt;`__, following the `Spectrum
+Analysis &lt;https://docs.gammapy.org/0.18.2/tutorials/spectrum_analysis.html&gt;`__
+tutorial. It forward-folds the spectral model (an exponential cut-off
+power law by default) with the IRF and fits the folded model to the
+binned count data in the energy range between ``Emin`` and ``Emax`` in
+``NEbins`` logarithmically spaced energy bins.
+</help>
+  <citations>
+    <citation type="doi">10.5281/zenodo.1421098</citation>
+  </citations>
+</tool>
\ No newline at end of file