comparison model_cube_file.py @ 0:2f3e314c3dfa draft default tip

planemo upload for repository https://github.com/esg-epfl-apc/tools-astro/tree/main/tools commit 4543470805fc78f6cf2604b9d55beb6f06359995
author astroteam
date Fri, 19 Apr 2024 10:06:21 +0000
1 #!/usr/bin/env python
2 # coding: utf-8
3
4 # flake8: noqa
5
6 import json
7 import os
8 import shutil
9
10 from oda_api.json import CustomJSONEncoder
11
12 get_ipython().run_cell_magic( # noqa: F821
13 "bash",
14 "",
15 'rm -r IRFS | echo "Ok"\nmkdir IRFS\ncd IRFS\nwget https://zenodo.org/records/5499840/files/cta-prod5-zenodo-fitsonly-v0.1.zip\nunzip cta-prod5-zenodo-fitsonly-v0.1.zip\ncd fits\nfor fn in *.gz ; do tar -zxvf $fn; done \n',
16 )
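# The bash cell above fetches the CTAO Prod5 (v0.1) IRF archive from Zenodo and
# unpacks the individual response files into IRFS/fits/; the file matching the
# requested site, zenith angle and telescope selection is picked further below.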
17
18 import sys
19
20 sys.path.append(".")
21 from pathlib import Path
22
23 import astropy.units as u
24 import matplotlib.pyplot as plt
25 import numpy as np
26 from astropy import units as u
27 from astropy import wcs
28 from astropy.coordinates import SkyCoord
29 from astropy.io import fits
30 from gammapy.data import (
31 FixedPointingInfo,
32 Observation,
33 PointingMode,
34 observatory_locations,
35 )
36 from gammapy.datasets import MapDataset, MapDatasetEventSampler
37 from gammapy.irf import load_irf_dict_from_file
38 from gammapy.makers import MapDatasetMaker
39 from gammapy.maps import Map, MapAxis
40 from gammapy.modeling.models import (
41 FoVBackgroundModel,
42 Models,
43 SkyModel,
44 TemplateSpatialModel,
45 )
46 from numpy import cos, pi, sqrt
47 from oda_api.api import ProgressReporter
48 from oda_api.data_products import BinaryProduct, PictureProduct
49
50 # not to be run on Galaxy
51 # %%bash
52 # git lfs install
53 # git lfs pull
54
55 data_cube = "3d.fits" # http://odahub.io/ontology#POSIXPath
56
57 # Source flux normalisation F0 in 1/(TeV cm2 s) at reference energy E0
58 F0 = 1e-11 # http://odahub.io/ontology#Float
59 E0 = 1.0 # http://odahub.io/ontology#Energy_TeV
60
61 OffAxis_angle = 0.4 # http://odahub.io/ontology#AngleDegrees
62
63 Radius_spectal_extraction = 0.2 # http://odahub.io/ontology#AngleDegrees
64 Radius_sky_image = 2.5 # http://odahub.io/ontology#AngleDegrees
65
66 Site = "North" # http://odahub.io/ontology#String ; oda:allowed_value "North","South"
67 Telescopes_LST = True # http://odahub.io/ontology#Boolean
68 Telescopes_MST = True # http://odahub.io/ontology#Boolean
69 Telescopes_SST = False # http://odahub.io/ontology#Boolean
70
71 Texp = 1.0 # http://odahub.io/ontology#TimeIntervalHours
72
73 _galaxy_wd = os.getcwd()
74
75 with open("inputs.json", "r") as fd:
76 inp_dic = json.load(fd)
77 if "_data_product" in inp_dic.keys():
78 inp_pdic = inp_dic["_data_product"]
79 else:
80 inp_pdic = inp_dic
81
82 for vn, vv in inp_pdic.items():
83 if vn != "_selector":
84 globals()[vn] = type(globals()[vn])(vv)
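# The loop above lets Galaxy override the default parameter values defined in this
# script with the values supplied in inputs.json, casting each one to the type of
# the corresponding default. A minimal, purely illustrative inputs.json could be:
#   {"F0": 2e-11, "E0": 1.0, "Site": "North", "Texp": 5.0}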
85
86 R_s = Radius_spectal_extraction
87 Radius = Radius_sky_image
88 LSTs = Telescopes_LST
89 MSTs = Telescopes_MST
90 SSTs = Telescopes_SST
91 file_path = data_cube
92
93 pr = ProgressReporter()
94 pr.report_progress(stage="Progress", progress=10.0)
95
96 print("loading " + file_path)
97 cube_map = Map.read(file_path)
98 cube_map.geom
99
100 print("locating source")
101 # source = SkyCoord.from_name(src_name, frame='icrs', parse=False, cache=True)
102 source = cube_map.geom.center_skydir
103 DEC = float(source.dec / u.deg)
104 RA = float(source.ra / u.deg)
105
106 CTA_south_lat = -25.0
107 CTA_north_lat = 28.8  # approximate latitude of the CTAO North site (La Palma)
108 filename = ""
109 if Site == "North":
110 Zd = abs(DEC - CTA_north_lat)
111 if Zd < 30.0:
112 Zd = "20deg-"
113 elif Zd < 50:
114 Zd = "40deg-"
115 elif Zd < 70.0:
116 Zd = "60deg-"
117 else:
118 raise RuntimeError("Source not visible from " + Site)
119 if DEC > CTA_north_lat:
120 N_S = "NorthAz-"
121 else:
122 N_S = "SouthAz-"
123 if LSTs:
124 tel = "4LSTs"
125 if MSTs:
126 tel += "09MSTs"
127 if SSTs:
128 raise RuntimeError("No SSTs on the North site")
129 filename = "IRFS/fits/Prod5-North-" + Zd + N_S + tel
130 else:
131 Zd = abs(DEC - CTA_south_lat)
132 if Zd < 30.0:
133 Zd = "20deg-"
134 elif Zd < 50:
135 Zd = "40deg-"
136 elif Zd < 70.0:
137 Zd = "60deg-"
138 else:
139 raise RuntimeError("Source not visible from " + Site)
140 if DEC > CTA_south_lat:
141 N_S = "NorthAz-"
142 else:
143 N_S = "SouthAz-"
144 if MSTs:
145 tel = "14MSTs"
146 if SSTs:
147 tel += "37SSTs"
148 if LSTs:
149 raise RuntimeError("No LSTs on the South site")
150 filename = "IRFS/fits/Prod5-South-" + Zd + N_S + tel
151
152 if Texp * 3600.0 < 1800:  # IRF files exist for 0.5 h, 5 h and 50 h exposures; Texp is in hours
153 filename += ".1800s-v0.1.fits.gz"
154 elif Texp * 3600.0 < 18000:
155 filename += ".18000s-v0.1.fits.gz"
156 else:
157 filename += ".180000s-v0.1.fits.gz"
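# The selection above resolves to one of the Prod5 response files, for example
#   IRFS/fits/Prod5-North-20deg-NorthAz-4LSTs09MSTs.18000s-v0.1.fits.gz
# where the zenith-angle band, azimuth label and exposure suffix depend on the
# source declination, the chosen site and Texp.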
158
159 import os
160
161 print(filename)
162 if not os.path.exists(filename):
163 message = "No response function found!"
164 raise RuntimeError(message)
165
166 # telescope pointing will be shifted slightly
167 cdec = cos(DEC * pi / 180.0)
168 RA_pnt = RA - OffAxis_angle / cdec
169 DEC_pnt = DEC
170 pnt = SkyCoord(RA_pnt, DEC_pnt, unit="degree")
171
172 # telescope is pointing at a fixed position in ICRS for the observation
173 pointing = FixedPointingInfo(fixed_icrs=pnt, mode=PointingMode.POINTING)
174
175 location = observatory_locations["cta_north" if Site == "North" else "cta_south"]
176
177 print("loading IRFs")
178
179 # irfs = load_irf_dict_from_file(path / irf_filename)
180 # filename = "data/Prod5-North-20deg-AverageAz-4LSTs09MSTs.180000s-v0.1.fits.gz"
181 irfs_filename = filename
182 # the IRF file selected above for the chosen site, zenith angle,
183 # telescope selection and exposure time
184 irfs = load_irf_dict_from_file(irfs_filename)
185
186 print("Creating observation")
187 livetime = Texp * u.hr
188 observation = Observation.create(
189 obs_id=1001,
190 pointing=pointing,
191 livetime=livetime,
192 irfs=irfs,
193 location=location,
194 )
195 print(observation)
196
197 def GetBinSpectralModel(
198 E, bins_per_decade=20, amplitude=1e-12 * u.Unit("cm-2 s-1")
199 ):
200 # amplitude=1e-12 * u.Unit("cm-2 s-1") * norm
201 from gammapy.modeling.models import GaussianSpectralModel
202
203 sigma = (10 ** (1 / bins_per_decade) - 1) * E
204 return GaussianSpectralModel(mean=E, sigma=sigma, amplitude=amplitude)
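# GetBinSpectralModel represents one energy bin of the cube as a narrow Gaussian
# line centred on the bin energy E: with bins_per_decade = 20 the relative width
# is sigma / E = 10**(1/20) - 1 ~ 0.12, i.e. about one spectral bin, so summing
# one such component per cube slice approximates an arbitrary cube spectrum.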
205
206 print("Calculate energy range")
207
208 # selected_n_bins_per_decade = 20 # n bins per decade
209 max_rel_energy_error = 3
210
211 energy_axis = cube_map.geom.axes["energy"]
212 EminMap = energy_axis.edges[0]
213 EmaxMap = energy_axis.edges[-1]
214 stepE = energy_axis.edges[1] / energy_axis.edges[0]
215 nbins_per_decade = int(np.round(np.log(10) / np.log(stepE)))
216 Emin = EminMap / max_rel_energy_error
217 Emax = EmaxMap * max_rel_energy_error
218 nbins_per_decade, Emin, Emax
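# Worked example: if the cube energy edges are spaced by stepE = 10**(1/20), then
# nbins_per_decade = round(ln(10) / ln(stepE)) = 20, and the simulated (true)
# energy range is widened by max_rel_energy_error = 3 on both sides to leave room
# for energy dispersion in the sampled events.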
219
220 print("Create empty dataset")
221
222 # energy_axis = MapAxis.from_energy_bounds(max_rel_energy_error*Emin*u.TeV, Emax*u.TeV, nbin=selected_n_bins_per_decade, per_decade=True)
223 energy_axis_true = MapAxis.from_energy_bounds(
224 Emin, Emax, nbin=nbins_per_decade, per_decade=True, name="energy_true"
225 ) # TODO: get from geom
226 migra_axis = MapAxis.from_bounds(
227 1.0 / max_rel_energy_error,
228 max_rel_energy_error,
229 nbin=150,
230 node_type="edges",
231 name="migra",
232 )
233 # TODO: get from geom
234
235 geom = cube_map.geom
236
237 empty = MapDataset.create(
238 geom,
239 energy_axis_true=energy_axis_true,
240 migra_axis=migra_axis,
241 name="my-dataset",
242 )
243 maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"])
244 dataset = maker.run(empty, observation)
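# MapDatasetMaker.run() fills the empty dataset with the exposure, background,
# PSF and energy-dispersion maps derived from the observation's IRFs on the
# geometry of the input cube.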
245
246 Path("event_sampling").mkdir(exist_ok=True)
247 dataset.write("./event_sampling/dataset.fits", overwrite=True)
248
249 print("Plotting GaussianSpectralModel")
250 from gammapy.modeling.models import GaussianSpectralModel
251
252 meanE = 1 * u.TeV
253 bins_per_decade = 20
254 sigma = (10 ** (1 / bins_per_decade) - 1) * meanE
255 amplitude = 1 * u.Unit("cm-2 s-1")
256 gm = GaussianSpectralModel(mean=meanE, sigma=sigma, amplitude=amplitude)
257 ax = gm.plot(energy_bounds=(0.1, 100) * u.TeV)
258 ax.set_yscale("linear")
259 gm.integral(meanE - 3 * sigma, meanE + 3 * sigma)
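# Sanity check: the integral over mean +/- 3 sigma returns ~99.7% of `amplitude`,
# i.e. the amplitude of the Gaussian line is its total flux in cm-2 s-1; this is
# why the per-bin fluxes computed below can be passed directly as amplitudes.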
260
261 print("cube_map.get_spectrum()")
262 spec = cube_map.get_spectrum()
263 spec
264
265 # print('spec.plot()')
266 # spec.plot() # this plot shows dN/dE * E
267
268 pr.report_progress(stage="Progress", progress=20.0)
269
270 print("Find norm bin")
271
272 energy_bins = cube_map.geom.axes["energy"].center
273 len(energy_bins), float(np.max(energy_bins) / u.TeV)
274 norm_bin = 0
275 for i, E in enumerate(energy_bins):
276 if E > E0 * u.TeV:
277 norm_bin = i
278 break
279 assert norm_bin > 0
280 norm_bin
281
282 print("obtain norm_bin_width")
283 norm_bin_width = cube_map.geom.axes["energy"].bin_width[norm_bin]
284 norm_bin_width
285
286 print("find norm_flux")
287 # Npart=5000 # TODO update
288 # n_events_reduction_factor = 1 # suppress flux factor
289
290 int_bin_flux = (
291 spec.data.flatten()
292 ) # we don't have to multiply by energy_bins /u.TeV since spectrum is already multiplied by E (see above)
293 norm_flux = int_bin_flux[norm_bin] / norm_bin_width
294 norm_flux
295 # int_bin_flux /= (Npart/200000 * np.max(int_bin_flux) * n_events_reduction_factor * 20/len(energy_bins)) # roughly 100 events
296 # int_bin_flux
297
298 print("find mult")
299 mult = F0 * u.Unit("cm-2 s-1 TeV-1") / norm_flux # .decompose()
300 mult
301
302 print("find int_bin_flux")
303 int_bin_flux = mult * int_bin_flux
304
305 int_bin_flux
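# Normalisation: the per-bin fluxes are rescaled by mult = F0 / norm_flux, the
# ratio of the requested differential flux F0 at E0 to the cube's own value
# there, so the simulated source matches F0 at E0 while keeping the cube's
# spectral and spatial shape.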
306
307 pr.report_progress(stage="Progress", progress=30.0)
308
309 print("Creating bin_models")
310
311 bin_models = []
312 for i, (flux, E) in enumerate(zip(int_bin_flux, energy_bins)):
313 # print(i)
314 if flux == 0:
315 print("skipping bin ", i)
316 continue
317 # print(flux)
318 spectral_model_delta = GetBinSpectralModel(
319 E, amplitude=flux
320 ) # normalizing here
321 spatial_template_model = TemplateSpatialModel(
322 cube_map.slice_by_idx({"energy": i}),
323 filename=f"cube_bin{i}.fit",
324 normalize=True,
325 )
326 sky_bin_model = SkyModel(
327 spectral_model=spectral_model_delta,
328 spatial_model=spatial_template_model,
329 name=f"bin_{i}",
330 )
331 bin_models.append(sky_bin_model)
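# Each non-empty cube slice thus becomes one SkyModel: a narrow Gaussian line at
# the bin energy (carrying the bin flux) multiplied by the normalised spatial
# template of that slice, so the event sampler reproduces the 3D cube bin by bin.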
332
333 print("Creating bkg_model")
334 bkg_model = FoVBackgroundModel(dataset_name="my-dataset")
335 models = Models(bin_models + [bkg_model])
336
337 print("dataset.models = models")
338 dataset.models = models
339
340 print("Creating sampler")
341 sampler = MapDatasetEventSampler(random_state=0)
342 print("Running sampler")
343 events = sampler.run(dataset, observation)
344
345 hdul = fits.open(filename)
346 aeff = hdul["EFFECTIVE AREA"].data
347 ENERG_LO = aeff["ENERG_LO"][0]
348 ENERG_HI = aeff["ENERG_HI"][0]
349 THETA_LO = aeff["THETA_LO"][0]
350 THETA_HI = aeff["THETA_HI"][0]
351 EFFAREA = aeff["EFFAREA"][0]
352 ind_offaxis = len(THETA_LO[THETA_LO < OffAxis_angle]) - 1
353 EFFAREA_offaxis = EFFAREA[ind_offaxis]
354 HDU_EFFAREA = hdul["EFFECTIVE AREA"]
355 HDU_RMF = hdul["ENERGY DISPERSION"]
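# The EFFECTIVE AREA and ENERGY DISPERSION HDUs of the selected IRF file are kept
# here so they can be appended to the simulated event list below, giving
# downstream spectral tools access to the response used in the simulation.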
356
357 pr.report_progress(stage="Progress", progress=80.0)
358
359 print(f"Save events ...")
360 primary_hdu = fits.PrimaryHDU()
361 hdu_evt = fits.BinTableHDU(events.table)
362 hdu_gti = fits.BinTableHDU(dataset.gti.table, name="GTI")
363 hdu_all = fits.HDUList([primary_hdu, hdu_evt, hdu_gti, HDU_EFFAREA, HDU_RMF])
364 hdu_all.writeto(f"./events.fits", overwrite=True)
365
366 print(f"Reading events ...")
367 hdul = fits.open("events.fits")
368 ev = hdul["EVENTS"].data
369 ra = ev["RA"]
370 dec = ev["DEC"]
371 en = ev["ENERGY"]
372
373 [cube_map.geom.center_coord[i] / cube_map.geom.data_shape[i] for i in (0, 1)]
374
375 ra_bins, dec_bins = (int(2 * x) for x in cube_map.geom.center_pix[:2])
376 ra_bins, dec_bins
377
378 Radius = float(min(cube_map.geom.width / 2 / u.degree).decompose())
379
380 print(f"Building event image ...")
381 plt.close()
382 pixsize = 0.1
383 from matplotlib.colors import LogNorm
384
385 cube_map.geom.width[0]
386
387 Nbins = 2 * int(Radius / pixsize) + 1
388 ra0 = np.mean(ra)
389 dec0 = np.mean(dec)
390 from numpy import cos, pi
391
392 cdec = cos(DEC_pnt * pi / 180.0)
393 ra_bins = np.linspace(RA - Radius / cdec, RA + Radius / cdec, Nbins + 1)
394 dec_bins = np.linspace(DEC - Radius, DEC + Radius, Nbins + 1)
395
396 h = plt.hist2d(ra, dec, bins=[ra_bins, dec_bins], norm=LogNorm())
397 image = h[0]
398 plt.colorbar()
399 plt.xlabel("RA")
400 plt.ylabel("Dec")
401
402 print(f"Building event image 2 ...")
403 plt.figure()
404 # Create a new WCS object. The number of axes must be set
405 # from the start
406 w = wcs.WCS(naxis=2)
407
408 w.wcs.ctype = ["RA---CAR", "DEC--CAR"]
409 # we need a Plate carrée (CAR) projection since histogram is binned by ra-dec
410 # the peculiarity here is that CAR projection produces rectilinear grid only if CRVAL2==0
411 # also, we will follow convention of RA increasing from right to left (CDELT1<0, need to flip an input image)
412 # otherwise, aladin-lite doesn't show it
413 w.wcs.crval = [RA_pnt, 0]
414 w.wcs.crpix = [Nbins / 2.0 + 0.5, 1 - dec_bins[0] / pixsize]
415 w.wcs.cdelt = np.array([-pixsize / cdec, pixsize])
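# With CRVAL2 = 0 the CAR projection stays rectilinear in RA/Dec; CRPIX2 is
# chosen so that declination dec_bins[0] falls on FITS pixel row 1, and the
# negative CDELT1 makes RA increase towards the left (hence the image is flipped
# along the RA axis before writing).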
416
417 header = w.to_header()
418
419 hdu = fits.PrimaryHDU(np.flip(image.T, axis=1), header=header)
420 hdu.writeto("Image.fits", overwrite=True)
421 hdu = fits.open("Image.fits")
422 im = hdu[0].data
423 wcs1 = wcs.WCS(hdu[0].header)
424 ax = plt.subplot(projection=wcs1)
425 lon = ax.coords["ra"]
426 lon.set_major_formatter("d.dd")
427 lat = ax.coords["dec"]
428 lat.set_major_formatter("d.dd")
429 plt.imshow(im, origin="lower")
430 plt.colorbar(label="Counts")
431
432 plt.scatter(
433 [RA_pnt],
434 [DEC_pnt],
435 marker="x",
436 color="white",
437 alpha=0.5,
438 transform=ax.get_transform("world"),
439 )
440 plt.scatter(
441 [RA],
442 [DEC],
443 marker="+",
444 color="red",
445 alpha=0.5,
446 transform=ax.get_transform("world"),
447 )
448 plt.grid(color="white", ls="solid")
449 plt.xlabel("RA")
450 plt.ylabel("Dec")
451 plt.savefig("Image.png", format="png", bbox_inches="tight")
452
453 print("building event spectrum")
454 plt.close()
455 E = (events.energy / u.TeV).decompose()
456 ras = events.radec.ra.deg
457 decs = events.radec.dec.deg
458 # plt.hist(E,bins=np.logspace(-2,2,41))
459
460 mask = events.table["MC_ID"] > 0
461 plt.hist(E[mask], bins=np.logspace(-2, 2, 41), alpha=0.5, label="source")
462 mask = events.table["MC_ID"] == 0
463 plt.hist(E[mask], bins=np.logspace(-2, 2, 41), alpha=0.5, label="background")
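# The masks above follow the sampler's MC_ID column as used here: MC_ID > 0 marks
# events drawn from the source bin models, MC_ID == 0 marks events drawn from the
# instrumental background model.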
464 plt.xlabel("E, TeV")
465
466 plt.xscale("log")
467 plt.yscale("log")
468 plt.legend(loc="upper right")
469 plt.savefig("event_spectrum.png", format="png")
470
471 coord_s = SkyCoord(RA, DEC, unit="degree")
472 RA_bkg = RA_pnt - (RA - RA_pnt)
473 DEC_bkg = DEC_pnt - (DEC - DEC_pnt)
474 coord_b = SkyCoord(RA_bkg, DEC_bkg, unit="degree")
475 coords = SkyCoord(ra, dec, unit="degree")
476
477 plt.figure()
478 ev_src = en[coords.separation(coord_s).deg < R_s]
479 ev_bkg = en[coords.separation(coord_b).deg < R_s]
480 ENERG_BINS = np.concatenate((ENERG_LO, [ENERG_HI[-1]]))
481 ENERG = sqrt(ENERG_LO * ENERG_HI)
482 h1 = np.histogram(ev_src, bins=ENERG_BINS)
483 h2 = np.histogram(ev_bkg, bins=ENERG_BINS)
484 cts_s = h1[0]
485 cts_b = h2[0]
486 src = cts_s - cts_b
487 src_err = sqrt(cts_s + cts_b)
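# Background subtraction with a mirrored ("reflected") region: counts within R_s
# of a position symmetric to the source about the pointing are subtracted from
# the source-region counts, and the error bar is the Poisson uncertainty
# sqrt(N_s + N_b).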
488 plt.errorbar(ENERG, src, src_err)
489 plt.axhline(0, linestyle="dashed", color="black")
490 plt.xscale("log")
491 plt.xlabel(r"$E$, TeV")
492 plt.ylabel("Counts")
493 plt.yscale("log")
494 plt.ylim(0.1, 2 * max(src))
495 plt.savefig("Count_spectrum.png")
496
497 plt.figure()
498 sep_s = coords.separation(coord_s).deg
499 sep_b = coords.separation(coord_b).deg
500 plt.hist(sep_s**2, bins=np.linspace(0, 0.5, 50))
501 plt.hist(sep_b**2, bins=np.linspace(0, 0.5, 50))
502 plt.axvline(R_s**2, color="black", linestyle="dashed")
503 plt.xlabel(r"$\theta^2$, degrees")
504 plt.ylabel("Counts")
505 plt.savefig("Theta2_plot.png")
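# The theta^2 histograms compare the squared angular separations of events from
# the source position and from the mirrored background position; an excess below
# the dashed line at R_s**2 indicates the simulated source signal.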
506
507 pr.report_progress(stage="Progress", progress=100.0)
508
509 fits_events = BinaryProduct.from_file("events.fits")
510 bin_image = PictureProduct.from_file("Image.png")
511 spec_image = PictureProduct.from_file("Count_spectrum.png")
512 theta2_png = PictureProduct.from_file("Theta2_plot.png")
513
514 spectrum_png = spec_image # http://odahub.io/ontology#ODAPictureProduct
515 theta2_png = theta2_png # http://odahub.io/ontology#ODAPictureProduct
516 image_png = bin_image # http://odahub.io/ontology#ODAPictureProduct
517 event_list_fits = fits_events # http://odahub.io/ontology#ODABinaryProduct
518
519 # output gathering
520 _galaxy_meta_data = {}
521 _oda_outs = []
522 _oda_outs.append(
523 (
524 "out_model_cube_file_spectrum_png",
525 "spectrum_png_galaxy.output",
526 spectrum_png,
527 )
528 )
529 _oda_outs.append(
530 ("out_model_cube_file_theta2_png", "theta2_png_galaxy.output", theta2_png)
531 )
532 _oda_outs.append(
533 ("out_model_cube_file_image_png", "image_png_galaxy.output", image_png)
534 )
535 _oda_outs.append(
536 (
537 "out_model_cube_file_event_list_fits",
538 "event_list_fits_galaxy.output",
539 event_list_fits,
540 )
541 )
542
543 for _outn, _outfn, _outv in _oda_outs:
544 _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
545 if isinstance(_outv, str) and os.path.isfile(_outv):
546 shutil.move(_outv, _galaxy_outfile_name)
547 _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
548 elif getattr(_outv, "write_fits_file", None):
549 _outv.write_fits_file(_galaxy_outfile_name)
550 _galaxy_meta_data[_outn] = {"ext": "fits"}
551 elif getattr(_outv, "write_file", None):
552 _outv.write_file(_galaxy_outfile_name)
553 _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
554 else:
555 with open(_galaxy_outfile_name, "w") as fd:
556 json.dump(_outv, fd, cls=CustomJSONEncoder)
557 _galaxy_meta_data[_outn] = {"ext": "json"}
558
559 with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
560 json.dump(_galaxy_meta_data, fd)
561 print("*** Job finished successfully ***")