pre-defined_model.py @ 0:2f3e314c3dfa (draft, default, tip)
planemo upload for repository https://github.com/esg-epfl-apc/tools-astro/tree/main/tools commit 4543470805fc78f6cf2604b9d55beb6f06359995
author:   astroteam
date:     Fri, 19 Apr 2024 10:06:21 +0000
parents:  (none)
children: (none)
#!/usr/bin/env python
# coding: utf-8

# flake8: noqa

import json
import os
import shutil

from oda_api.json import CustomJSONEncoder

get_ipython().run_cell_magic(  # noqa: F821
    "bash",
    "",
    'wget https://gitlab.renkulab.io/astronomy/mmoda/cta/-/raw/master/Franceschini17.txt\n\nrm -r IRFS | echo "Ok"\nmkdir IRFS\ncd IRFS\nwget https://zenodo.org/records/5499840/files/cta-prod5-zenodo-fitsonly-v0.1.zip\nunzip cta-prod5-zenodo-fitsonly-v0.1.zip\ncd fits\nfor fn in *.gz ; do tar -zxvf $fn; done \n \n',
)
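
# Optional sanity check (an added sketch; it assumes the Zenodo prod5 archive
# unpacks into the "IRFS/fits" directory created by the shell commands above):
# count the IRF files that are available before one of them is selected below.
import glob

print("Found", len(glob.glob("IRFS/fits/Prod5-*.fits.gz")), "Prod5 IRF files")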

import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
from astropy import wcs
from astropy.coordinates import SkyCoord
from astropy.io import fits
from gammapy.data import Observation
from gammapy.datasets import MapDataset, MapDatasetEventSampler
from gammapy.irf import load_irf_dict_from_file
from gammapy.makers import MapDatasetMaker
from gammapy.maps import MapAxis, WcsGeom
from gammapy.modeling.models import FoVBackgroundModel, Models
from numpy import cos, exp, pi, sqrt
from oda_api.api import ProgressReporter
from oda_api.data_products import BinaryProduct, PictureProduct
from regions import CircleSkyRegion

# from gammapy.irf import load_cta_irfs

RA = 166.113809  # http://odahub.io/ontology#PointOfInterestRA
DEC = 38.208833  # http://odahub.io/ontology#PointOfInterestDEC

OffAxis_angle = 0.78  # http://odahub.io/ontology#AngleDegrees
# Exposure time in hours
Texp = 1.0  # http://odahub.io/ontology#TimeIntervalHours
# Source redshift
z = 0.03  # http://odahub.io/ontology#Float
# Source flux normalisation F0 in 1/(TeV cm2 s) at reference energy E0
F0 = 1e-11  # http://odahub.io/ontology#Float
E0 = 1.0  # http://odahub.io/ontology#Energy_TeV
Gamma = 2.0  # http://odahub.io/ontology#Float

Radius_spectal_extraction = 0.2  # http://odahub.io/ontology#Float
Radius_sky_image = 2.5  # http://odahub.io/ontology#AngleDegrees

Site = "North"  # http://odahub.io/ontology#String ; oda:allowed_value "North","South"
Telescope_LST = True  # http://odahub.io/ontology#Boolean
Telescope_MST = True  # http://odahub.io/ontology#Boolean
Telescope_SST = False  # http://odahub.io/ontology#Boolean

_galaxy_wd = os.getcwd()

with open("inputs.json", "r") as fd:
    inp_dic = json.load(fd)
if "_data_product" in inp_dic.keys():
    inp_pdic = inp_dic["_data_product"]
else:
    inp_pdic = inp_dic

for vn, vv in inp_pdic.items():
    if vn != "_selector":
        globals()[vn] = type(globals()[vn])(vv)
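
# For reference, a hypothetical inputs.json overriding the defaults above could
# look like (keys must match the parameter names defined in this script; each
# value is cast to the type of the corresponding default by the loop above):
# {"_data_product": {"RA": 166.113809, "DEC": 38.208833, "Texp": 0.5}}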

LSTs = Telescope_LST
MSTs = Telescope_MST
SSTs = Telescope_SST

Texp = Texp * 3600.0
DEC_pnt = DEC
cdec = cos(DEC * pi / 180.0)
RA_pnt = RA - OffAxis_angle / cdec
Radius = Radius_sky_image
R_s = Radius_spectal_extraction

pointing = SkyCoord(RA_pnt, DEC_pnt, unit="deg", frame="icrs")
coord_s = SkyCoord(RA, DEC, unit="deg", frame="icrs")
RA_bkg = RA_pnt - (RA - RA_pnt)
DEC_bkg = DEC_pnt - (DEC - DEC_pnt)
coord_b = SkyCoord(RA_bkg, DEC_bkg, unit="deg", frame="icrs")
offaxis = coord_s.separation(pointing).deg
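
# The background extraction position coord_b is the reflection of the source
# position through the pointing direction in (RA, Dec); for small offsets it
# therefore sits at approximately the same off-axis angle as the source.
print("Source off-axis angle:", offaxis, "deg")
print("Background off-axis angle:", coord_b.separation(pointing).deg, "deg")
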
pr = ProgressReporter()
pr.report_progress(stage="Progress", progress=10.0)

CTA_south_lat = -25.0
CTA_north_lat = 18.0
if Site == "North":
    Zd = abs(DEC - CTA_north_lat)
    if Zd < 30.0:
        Zd = "20deg-"
    elif Zd < 50:
        Zd = "40deg-"
    elif Zd < 70.0:
        Zd = "60deg-"
    else:
        raise RuntimeError("Source not visible from " + Site)
    if DEC > CTA_north_lat:
        N_S = "NorthAz-"
    else:
        N_S = "SouthAz-"
    if LSTs:
        tel = "4LSTs"
    if MSTs:
        tel += "09MSTs"
    if SSTs:
        raise RuntimeError("No SSTs on the North site")
    filename = "IRFS/fits/Prod5-North-" + Zd + N_S + tel
else:
    Zd = abs(DEC - CTA_south_lat)
    if Zd < 30.0:
        Zd = "20deg-"
    elif Zd < 50:
        Zd = "40deg-"
    elif Zd < 70.0:
        Zd = "60deg-"
    else:
        raise RuntimeError("Source not visible from " + Site)
    if DEC > CTA_south_lat:
        N_S = "NorthAz-"
    else:
        N_S = "SouthAz-"
    if MSTs:
        tel = "14MSTs"
    if SSTs:
        tel += "37SSTs"
    if LSTs:
        raise RuntimeError("No LSTs on the South site")
    filename = "IRFS/fits/Prod5-South-" + Zd + N_S + tel

if Texp < 1800:
    filename += ".1800s-v0.1.fits.gz"
elif Texp < 18000:
    filename += ".18000s-v0.1.fits.gz"
else:
    filename += ".180000s-v0.1.fits.gz"
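
# For illustration: with the defaults above (Site="North", DEC ~ 38.2 deg,
# Texp = 1 h, LSTs and MSTs enabled) the path built here would be
# "IRFS/fits/Prod5-North-20deg-NorthAz-4LSTs09MSTs.18000s-v0.1.fits.gz";
# the exact file names are assumed to follow the prod5-v0.1 Zenodo release.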

print(filename)
if not os.path.exists(filename):
    message = "No response function found!"
    raise RuntimeError(message)

hdul = fits.open(filename)
aeff = hdul["EFFECTIVE AREA"].data
ENERG_LO = aeff["ENERG_LO"][0]
ENERG_HI = aeff["ENERG_HI"][0]
THETA_LO = aeff["THETA_LO"][0]
THETA_HI = aeff["THETA_HI"][0]
EFFAREA = aeff["EFFAREA"][0]
print(offaxis)
ind_offaxis = len(THETA_LO[THETA_LO < offaxis]) - 1
EFAREA = EFFAREA[ind_offaxis]
HDU_EFFAREA = hdul["EFFECTIVE AREA"]
HDU_RMF = hdul["ENERGY DISPERSION"]

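# Quick diagnostic (an added sketch): report which off-axis bin of the
# effective-area table was selected for the requested pointing offset.
print(
    "Selected off-axis bin",
    ind_offaxis,
    "covering",
    THETA_LO[ind_offaxis],
    "-",
    THETA_HI[ind_offaxis],
    "deg",
)
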
E = np.logspace(-2, 2, 20)

d = np.genfromtxt("Franceschini17.txt")
ee = d[:, 0]
z_grid = np.array([0.01, 0.03, 0.1, 0.3, 0.5, 1.0, 1.5, 2.0, 3.0])
ind = len(z_grid[z > z_grid]) - 1
coeff = (z - z_grid[ind]) / (z_grid[ind + 1] - z_grid[ind])
# linear interpolation of the optical depth between the two bracketing redshift columns
tau = d[:, ind + 1] + coeff * (d[:, ind + 2] - d[:, ind + 1])
tau_interp = np.interp(E, ee, tau)

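# The simulated source spectrum is an EBL-absorbed power law,
#   dN/dE = F0 * (E / E0)**(-Gamma) * exp(-tau(E, z)),
# where tau(E, z) is the optical depth from the Franceschini17.txt table,
# interpolated above in redshift and energy.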
def powerlaw_EBL():
    return F0 * (E / E0) ** (-Gamma) * exp(-tau_interp)

F = powerlaw_EBL()
plt.plot(E, E**2 * F)
plt.xscale("log")
plt.yscale("log")
plt.ylim(1e-14, 1e-10)

# filename = "IRFS/fits/Prod5-North-20deg-AverageAz-4LSTs09MSTs.180000s-v0.1.fits.gz"
IRFS = load_irf_dict_from_file(filename)
dic = {
    "components": [
        {
            "name": "Source1",
            "type": "SkyModel",
            "spectral": {
                "type": "TemplateSpectralModel",
                "parameters": [{"name": "norm", "value": 1.0}],
                "energy": {"data": E.tolist(), "unit": "TeV"},
                "values": {"data": F.tolist(), "unit": "1 / (cm2 TeV s)"},
            },
            "spatial": {
                "type": "PointSpatialModel",
                "frame": "icrs",
                "parameters": [
                    {"name": "lon_0", "value": RA, "unit": "deg"},
                    {"name": "lat_0", "value": DEC, "unit": "deg"},
                ],
            },
        },
        {
            "type": "FoVBackgroundModel",
            "datasets_names": ["my-dataset"],
            "spectral": {
                "type": "PowerLawNormSpectralModel",
                "parameters": [
                    {"name": "norm", "value": 1.0},
                    {"name": "tilt", "value": 0.0},
                    {"name": "reference", "value": 1.0, "unit": "TeV"},
                ],
            },
        },
    ]
}
modelsky = Models.from_dict(dic)

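# Equivalent object-based construction (an illustrative sketch, not used below;
# it shows what the dictionary above maps to in the gammapy modeling API):
from gammapy.modeling.models import (
    PointSpatialModel,
    SkyModel,
    TemplateSpectralModel,
)

_source_check = SkyModel(
    spectral_model=TemplateSpectralModel(
        energy=E * u.TeV, values=F * u.Unit("1 / (cm2 TeV s)")
    ),
    spatial_model=PointSpatialModel(
        lon_0=RA * u.deg, lat_0=DEC * u.deg, frame="icrs"
    ),
    name="Source1-check",
)
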
bkg_model = FoVBackgroundModel(dataset_name="my-dataset")

observation = Observation.create(
    obs_id="0", pointing=pointing, livetime=str(Texp) + " s", irfs=IRFS
)

print(f"Create the dataset for {pointing}")
energy_axis = MapAxis.from_energy_bounds(
    "0.012 TeV", "100 TeV", nbin=10, per_decade=True
)
energy_axis_true = MapAxis.from_energy_bounds(
    "0.001 TeV", "300 TeV", nbin=20, per_decade=True, name="energy_true"
)
migra_axis = MapAxis.from_bounds(
    0.5, 2, nbin=150, node_type="edges", name="migra"
)

geom = WcsGeom.create(
    skydir=pointing,
    width=(2 * Radius, 2 * Radius),
    binsz=0.02,
    frame="icrs",
    axes=[energy_axis],
)

empty = MapDataset.create(
    geom,
    energy_axis_true=energy_axis_true,
    migra_axis=migra_axis,
    name="my-dataset",
)
maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
dataset = maker.run(empty, observation)

region_sky = CircleSkyRegion(center=pointing, radius=Radius * u.deg)
mask_map = dataset.geoms["geom"].region_mask(region_sky)
mod = modelsky.select_mask(mask_map)

bkg_idx = np.where(np.array(modelsky.names) == "my-dataset-bkg")
mod.append(modelsky[int(bkg_idx[0][0])])

dataset.models = mod

for m in dataset.models[:-1]:
    sep = m.spatial_model.position.separation(pointing).deg
    print(
        f"This is the spatial separation of {m.name} from the pointing direction: {sep}"
    )
pr.report_progress(stage="Progress", progress=50.0)
print("Simulate...")
sampler = MapDatasetEventSampler()
events = sampler.run(dataset, observation)

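# Quick diagnostic of the simulation step (an added sketch): report how many
# events the sampler produced for this observation.
print("Sampled", len(events.table), "events in", Texp, "s")
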
pr.report_progress(stage="Progress", progress=90.0)
print("Save events ...")
primary_hdu = fits.PrimaryHDU()
hdu_evt = fits.BinTableHDU(events.table)
hdu_gti = fits.BinTableHDU(dataset.gti.table, name="GTI")
hdu_all = fits.HDUList([primary_hdu, hdu_evt, hdu_gti, HDU_EFFAREA, HDU_RMF])
hdu_all.writeto("./events.fits", overwrite=True)

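# Note: the events.fits file written above bundles the sampled EVENTS and GTI
# tables together with the EFFECTIVE AREA and ENERGY DISPERSION HDUs copied
# from the IRF file, so the instrument response travels with the event list.
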
hdul = fits.open("events.fits")
ev = hdul["EVENTS"].data
ra = ev["RA"]
dec = ev["DEC"]
coords = SkyCoord(ra, dec, unit="degree")
en = ev["ENERGY"]

from matplotlib.colors import LogNorm

plt.figure()
pixsize = 0.1
Nbins = 2 * int(Radius / pixsize) + 1
ra0 = np.mean(ra)
dec0 = np.mean(dec)

cdec = cos(DEC_pnt * pi / 180.0)
ra_bins = np.linspace(
    RA_pnt - Radius / cdec, RA_pnt + Radius / cdec, Nbins + 1
)
dec_bins = np.linspace(DEC_pnt - Radius, DEC_pnt + Radius, Nbins + 1)

h = plt.hist2d(ra, dec, bins=[ra_bins, dec_bins], norm=LogNorm())
image = h[0]
plt.colorbar()
plt.xlabel("RA")
plt.ylabel("Dec")

plt.figure()
ev_src = en[coords.separation(coord_s).deg < R_s]
ev_bkg = en[coords.separation(coord_b).deg < R_s]
ENERG_BINS = np.concatenate((ENERG_LO, [ENERG_HI[-1]]))
ENERG = sqrt(ENERG_LO * ENERG_HI)
h1 = np.histogram(ev_src, bins=ENERG_BINS)
h2 = np.histogram(ev_bkg, bins=ENERG_BINS)
cts_s = h1[0]
cts_b = h2[0]
src = cts_s - cts_b
src_err = sqrt(cts_s + cts_b)
plt.errorbar(ENERG, src, src_err)
plt.axhline(0, linestyle="dashed", color="black")
plt.xscale("log")
plt.xlabel(r"$E$, TeV")
plt.ylabel("Counts")
plt.yscale("log")
plt.ylim(0.1, 2 * max(src))
plt.savefig("Count_spectrum.png")

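# Optional sketch: overall significance of the excess in the source extraction
# circle, using the Li & Ma (1983, Eq. 17) formula with alpha = 1 (the source
# and background regions defined above have equal radii).
N_on = cts_s.sum()
N_off = cts_b.sum()
if N_on > 0 and N_off > 0:
    S_lima = sqrt(
        2
        * (
            N_on * np.log(2 * N_on / (N_on + N_off))
            + N_off * np.log(2 * N_off / (N_on + N_off))
        )
    )
    print(
        f"Excess: {N_on - N_off} counts, "
        f"Li & Ma significance: {S_lima:.1f} sigma"
    )
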
plt.figure()
sep_s = coords.separation(coord_s).deg
sep_b = coords.separation(coord_b).deg
plt.hist(sep_s**2, bins=np.linspace(0, 0.5, 50))
plt.hist(sep_b**2, bins=np.linspace(0, 0.5, 50))
plt.axvline(R_s**2, color="black", linestyle="dashed")
plt.xlabel(r"$\theta^2$, degrees")
plt.ylabel("Counts")
plt.savefig("Theta2_plot.png")

# Create a new WCS object. The number of axes must be set
# from the start.
plt.figure()
w = wcs.WCS(naxis=2)

w.wcs.ctype = ["RA---CAR", "DEC--CAR"]
# We need a plate carrée (CAR) projection since the histogram is binned in RA-Dec.
# The peculiarity here is that the CAR projection produces a rectilinear grid only if CRVAL2 == 0.
# We also follow the convention of RA increasing from right to left (CDELT1 < 0, so the
# input image has to be flipped); otherwise aladin-lite does not show it.
w.wcs.crval = [RA_pnt, 0]
w.wcs.crpix = [Nbins / 2.0 + 0.5, 1 - dec_bins[0] / pixsize]
w.wcs.cdelt = np.array([-pixsize / cdec, pixsize])

header = w.to_header()

hdu = fits.PrimaryHDU(np.flip(image.T, axis=1), header=header)
hdu.writeto("Image.fits", overwrite=True)
hdu = fits.open("Image.fits")
im = hdu[0].data
wcs1 = wcs.WCS(hdu[0].header)
ax = plt.subplot(projection=wcs1)
lon = ax.coords["ra"]
lon.set_major_formatter("d.dd")
lat = ax.coords["dec"]
lat.set_major_formatter("d.dd")
plt.imshow(im, origin="lower")
plt.colorbar(label="Counts")

plt.scatter(
    [RA_pnt],
    [DEC_pnt],
    marker="x",
    color="white",
    alpha=0.5,
    transform=ax.get_transform("world"),
)
plt.scatter(
    [RA],
    [DEC],
    marker="+",
    color="red",
    alpha=0.5,
    transform=ax.get_transform("world"),
)
plt.grid(color="white", ls="solid")
plt.xlabel("RA")
plt.ylabel("Dec")
plt.savefig("Image.png", format="png", bbox_inches="tight")

fits_events = BinaryProduct.from_file("events.fits")
bin_image1 = PictureProduct.from_file("Image.png")
bin_image2 = PictureProduct.from_file("Theta2_plot.png")
bin_image3 = PictureProduct.from_file("Count_spectrum.png")
pr.report_progress(stage="Progress", progress=100.0)

image_png = bin_image1  # http://odahub.io/ontology#ODAPictureProduct
theta2_png = bin_image2  # http://odahub.io/ontology#ODAPictureProduct
spectrum_png = bin_image3  # http://odahub.io/ontology#ODAPictureProduct
event_list_fits = fits_events  # http://odahub.io/ontology#ODABinaryProduct

# output gathering
_galaxy_meta_data = {}
_oda_outs = []
_oda_outs.append(
    ("out_pre_defined_model_image_png", "image_png_galaxy.output", image_png)
)
_oda_outs.append(
    (
        "out_pre_defined_model_theta2_png",
        "theta2_png_galaxy.output",
        theta2_png,
    )
)
_oda_outs.append(
    (
        "out_pre_defined_model_spectrum_png",
        "spectrum_png_galaxy.output",
        spectrum_png,
    )
)
_oda_outs.append(
    (
        "out_pre_defined_model_event_list_fits",
        "event_list_fits_galaxy.output",
        event_list_fits,
    )
)

for _outn, _outfn, _outv in _oda_outs:
    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
    if isinstance(_outv, str) and os.path.isfile(_outv):
        shutil.move(_outv, _galaxy_outfile_name)
        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
    elif getattr(_outv, "write_fits_file", None):
        _outv.write_fits_file(_galaxy_outfile_name)
        _galaxy_meta_data[_outn] = {"ext": "fits"}
    elif getattr(_outv, "write_file", None):
        _outv.write_file(_galaxy_outfile_name)
        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
    else:
        with open(_galaxy_outfile_name, "w") as fd:
            json.dump(_outv, fd, cls=CustomJSONEncoder)
        _galaxy_meta_data[_outn] = {"ext": "json"}

with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
    json.dump(_galaxy_meta_data, fd)
print("*** Job finished successfully ***")