
Commit

fix: implement fov spectrum (not tested extensively), fixes #53 and partially fixes #51
samanthalwong committed Aug 2, 2024
1 parent 8f2ab94 commit c9e5233
Showing 4 changed files with 389 additions and 471 deletions.
386 changes: 115 additions & 271 deletions examples/analysis_notebook.ipynb

Large diffs are not rendered by default.

27 changes: 10 additions & 17 deletions examples/example_config.yaml
@@ -11,9 +11,9 @@ io:
results_dir: "/raid/romulus/swong/mydl3/crabtest/results/"

# Whether or not to generate the background from the run
from_run : false
from_run : true
# Whether or not to analyze runs from a runlist or all runs in the results_dir
from_runlist: true
from_runlist: false

binning:
# Energy binning for bkg generation
@@ -23,7 +23,7 @@ binning:
# Offset/wobble binning for bkg generation
off_min: 0
off_max: 2.5
off_bins: 5
off_bins: 20

config:
# Number of cores to be used for parallel bkg generation
@@ -38,7 +38,6 @@ exclusion:
# Exclude region around a given ra, dec
exclude_regions :
# Exclude region around a given ra, dec
# exclude_source_pos : [ ("287.10 deg", "6.39 deg", "0.5 deg") ]
# Exclude entire run within a distance to a given ra/dec
# Expecting ra, dec, exclusion
@@ -75,8 +74,8 @@ run_selection:
# Whether to get source position from the DL3 header
pos_from_DL3: true
# If pos_from_DL3 is set to false, provide source coordinates in deg
source_ra: 253.4675000
source_dec: 39.7602778
source_ra: 83.633114
source_dec: 22.014487
# Maximum offset in deg to select data from
offset_max : 3.5
# Minimum number of telescopes to select as valid runs
@@ -85,6 +84,8 @@ run_selection:
el_min : 30

spectrum :
# How the spectral analysis should be done - "RE" for reflected regions, "FoV" for field-of-view background (suited to extended sources)
analysis_type: "RE"
# Energy threshold for spectral/integral flux calculations
e_min : 0.2
# Maximum energy for spectral/integral flux calculations
@@ -97,38 +98,30 @@ spectrum :
# Assumes normalization is in units of TeV^-1 cm^-2 s^-1
params : [3.45e-11,2.6]

lightcurve:
#light curve bin size in minutes
bin_size_min: 1000

sky_map:
# Energy threshold for events to be included in the sky map (TeV)
e_min: 0.01
# Maximum energy for sky map (TeV)
e_max: 100
# Number of energy bins for the sky map
n_bins: 30
# Sky map size (deg)
map_deg: 5
# Sky map binning (deg) - ED default is 0.01, VEGAS is 0.02
# Larger bins run faster
bin_sz: 0.01
offset_max: 1.75
# Safe energy threshold (as a %) for effective area - ED doesn't use this, so it's set fairly small
aeff_max_percent: 0.1
#exclusion regions are lists of lists of [[SkyCoord, radius], [SkyCoord, radius], ...]
exclusion_regions: []
#theta defines the ON region size (deg)
theta: 0.0894427191
#exclusion region around the source (deg)
on_exclusion_region: 0.35
#RBM parameters
ring_rin: "0.6 deg"
ring_width: "0.133333 deg"
ring_rin: "0.53 deg"
ring_width: "0.14 deg"
#whether or not to truncate the skymap colourbar at +/- 5 sigma
truncate: true
#dimmest visual magnitude for stars to be excluded
min_star_brightness: 8
min_star_brightness: 6

#where to save outputs
results_file: 'crab_test_results.yaml'
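The new analysis_type switch in the spectrum block selects between the two spectrum builders defined in gammapy_tools/analysis/data_products.py. A minimal dispatch sketch, assuming the YAML above has been parsed into a dict; the driver script and file path are illustrative and not part of this commit, and the full argument list of make_spectrum_RE is not shown in this diff:

import yaml

from gammapy_tools.analysis.data_products import make_spectrum_fov

# Load the example configuration (path is illustrative)
with open("examples/example_config.yaml") as f:
    config = yaml.safe_load(f)

if config["spectrum"]["analysis_type"] == "FoV":
    # New in this commit: spectrum from a field-of-view background fit
    flux_points, spec = make_spectrum_fov(config, plot=True)
else:
    # "RE": reflected-regions spectrum via make_spectrum_RE in the same module
    # (its argument list is not shown in this diff, so the call is omitted here)
    ...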
214 changes: 170 additions & 44 deletions gammapy_tools/analysis/data_products.py
@@ -19,19 +19,27 @@
FluxPointsDataset,
SpectrumDataset,
)
from gammapy.datasets import MapDataset
from gammapy.estimators import FluxPointsEstimator, FluxPoints
from gammapy.makers import (
ReflectedRegionsBackgroundMaker,
SafeMaskMaker,
SpectrumDatasetMaker,
MapDatasetMaker,
FoVBackgroundMaker,
)
from gammapy.analysis import Analysis, AnalysisConfig
from gammapy.modeling import Fit
from gammapy.maps import MapAxis, RegionGeom, WcsGeom
from gammapy.modeling import Fit
from gammapy.modeling.models import (
ExpCutoffPowerLawSpectralModel,
PowerLawSpectralModel,
LogParabolaSpectralModel,
SkyModel,
FoVBackgroundModel,
Models,
GaussianSpatialModel,
)
from gammapy.estimators import LightCurveEstimator
from gammapy.visualization import plot_spectrum_datasets_off_regions
@@ -187,16 +195,6 @@ def make_spectrum_RE(
time = info_table["livetime"].to("h")
sig = info_table["sqrt_ts"]

# plot exclusion regions and reflected regions
if len(exclusion_regions) > 0 and plot:
plt.figure(figsize=(8, 8))
ax = exclusion_mask.plot()
on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta", label="ON")
plot_spectrum_datasets_off_regions(
ax=ax, datasets=datasets
) # add legend=True to plot run numbers associated w/ OFF regions
plt.show()

# make spectrum model from user input
if spectrum == "PL":
# power law spectral model
@@ -220,13 +218,6 @@
spectral_model = LogParabolaSpectralModel(
alpha=float(alp), amplitude=float(amp), reference=1 * u.TeV, beta=float(bet)
)
# calculate integral flux from spectral model
integral_flux = spectral_model.integral(
e_min * u.Unit("TeV"), e_max * u.Unit("TeV")
)
integral_flux_err = spectral_model.integral_error(
e_min * u.Unit("TeV"), e_max * u.Unit("TeV")
)

model = SkyModel(spectral_model=spectral_model, name="my_source")
datasets.models = [model]
@@ -257,11 +248,7 @@ def make_spectrum_RE(

return (
flux_points,
result_joint.models,
time,
sig,
integral_flux,
integral_flux_err[0],
result_joint.models[0].spectral_model,
)
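make_spectrum_RE now returns the flux points and the best-fit spectral model rather than the livetime, significance, and precomputed integral flux. If the integral flux is still needed, it can be recovered from the returned model with the same calls that were removed above; a sketch, assuming spec is the returned spectral model and e_min/e_max are the thresholds from the config (in TeV):

import astropy.units as u

# spec is the fitted spectral model returned by make_spectrum_RE
integral_flux = spec.integral(e_min * u.TeV, e_max * u.TeV)
integral_flux_err = spec.integral_error(e_min * u.TeV, e_max * u.TeV)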


@@ -311,15 +298,6 @@ def get_flux_lc(config: dict, type: Optional[str] = "flux") -> LightCurveEstimat
e_max = 100 # config["spectrum"]["e_max"]
nbin = config["spectrum"]["e_bins"]

# Currently unused?
# _selection = dict(
# type="sky_circle",
# frame="icrs",
# lon=source_pos.ra,
# lat=source_pos.dec,
# radius=2 * u.deg,
# )

# energy binning
energy_axis = MapAxis.from_energy_bounds(
str(e_min) + " TeV", str(e_max) + " TeV", nbin
@@ -351,19 +329,6 @@
)
)

# exclude nearby HAWC sources
# hawc = SourceCatalog3HWC(environ["GAMMAPY_DATA"] + "/catalogs/3HWC.ecsv")
# hawc_mask = np.sqrt(
# (hawc.table["ra"] - source_pos.ra.deg)**2 +
# (hawc.table["dec"] - source_pos.dec.deg)**2
# ) < 2.5
# for src in hawc.table[hawc_mask]:
# if src['search_radius'] > 0:
# exclusion_regions.append(CircleSkyRegion(center=SkyCoord(src['ra'],src['dec'],unit='deg',frame='icrs'),radius=src['search_radius']*u.deg))
# else:
# exclusion_regions.append(CircleSkyRegion(center=SkyCoord(src['ra'],src['dec'],unit='deg',frame='icrs'),radius=0.35*u.deg))

# exclude bright stars with 0.3 deg region (same as ED)
star_data = np.loadtxt(
environ["GAMMAPY_DATA"] + "/catalogs/Hipparcos_MAG8_1997.dat",
usecols=(0, 1, 2, 3, 4),
@@ -479,3 +444,164 @@

lc_1d = lc_maker_1d.run(datasets=datasets)
return lc_1d

def make_spectrum_fov(config: dict, plot: bool = True):
"""Make a spectrum using the field-of-view (FoV) background method.

Selects runs around the source position, fits a per-run FoV background
normalisation, stacks the datasets, fits a source model, and estimates
flux points. Returns (flux_points, spectral_model).
"""
data_store = DataStore.from_dir(config["io"]["out_dir"])
if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header
hdul = fits.open(
config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0]
)
source_pos = SkyCoord(
hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg
)
else: # get position from ra/dec [deg]
source_pos = SkyCoord(
config["run_selection"]["source_ra"],
config["run_selection"]["source_dec"],
frame="icrs",
unit="deg",
)
selection = dict(
type="sky_circle",
frame="icrs",
lon=source_pos.ra,
lat=source_pos.dec,
radius="5 deg",
)
selected_obs_table = data_store.obs_table.select_observations(selection)
observations = data_store.get_observations(selected_obs_table["OBS_ID"])
# We now fix the energy axis for the counts map - (the reconstructed energy binning)
e_min = config["spectrum"]["e_min"]
e_max = config["spectrum"]["e_max"]
nbins = 10  # number of reconstructed-energy bins

energy_axis = MapAxis.from_energy_bounds(e_min*u.TeV,e_max*u.TeV,nbins)

# We now fix the energy axis for the IRF maps (exposure, etc) - (the true energy binning)
energy_axis_true = MapAxis.from_energy_bounds(0.01,100,30,unit="TeV",name="energy_true")

geom = WcsGeom.create(
skydir=source_pos,
binsz=config["sky_map"]["bin_sz"],
width=(config["sky_map"]["map_deg"]*u.deg,config["sky_map"]["map_deg"]*u.deg),
frame="icrs",
proj="CAR",
axes=[energy_axis],
)
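# Empty stacked dataset that each run's dataset is summed into below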
stacked = MapDataset.create(
geom=geom, energy_axis_true=energy_axis_true, name="stacked"
)

# apply exclusion regions
exclusion_regions = []

exclusion_regions.append(
CircleSkyRegion(
center=source_pos, radius=config["sky_map"]["on_exclusion_region"] * u.deg
)
)

if (
len(config["sky_map"]["exclusion_regions"]) > 0
): # should be a list of CircleSkyRegions
for region in config["sky_map"]["exclusion_regions"]:
ra, dec = region[0]
radius = region[1]
exclusion_regions.append(
CircleSkyRegion(
center=SkyCoord(ra, dec, unit="deg", frame="icrs"),
radius=radius * u.deg,
)
)
# exclude bright stars
star_data = np.loadtxt(
environ["GAMMAPY_DATA"] + "/catalogs/Hipparcos_MAG8_1997.dat",
usecols=(0, 1, 2, 3, 4),
)
star_cat = Table(
{
"ra": star_data[:, 0],
"dec": star_data[:, 1],
"id": star_data[:, 2],
"mag": star_data[:, 3],
"colour": star_data[:, 4],
}
)
star_mask = (
np.sqrt(
(star_cat["ra"] - source_pos.ra.deg) ** 2
+ (star_cat["dec"] - source_pos.dec.deg) ** 2
)
< 2.0
)
star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][
"min_star_brightness"
]

# append stars to exclusion list
for src in star_cat[star_mask]:
exclusion_regions.append(
CircleSkyRegion(
center=SkyCoord(src["ra"], src["dec"], unit="deg", frame="icrs"),
radius=0.3 * u.deg,
)
)
offset_max = config["sky_map"]["offset_max"] * u.deg
maker = MapDatasetMaker()
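# Safe-range cuts: maximum offset from the pointing plus the default effective-area energy threshold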
maker_safe_mask = SafeMaskMaker(
methods=["offset-max", "aeff-default"], offset_max=offset_max, aeff_percent=10
)

# mask out all exclusion regions (source region, user-defined regions, bright stars)
exclusion_mask = ~geom.region_mask(regions=exclusion_regions)
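# FoV background maker: scale the IRF background template to the counts outside the exclusion mask, run by run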
maker_fov = FoVBackgroundMaker(method="scale", exclusion_mask=exclusion_mask)

for obs in observations:
# A MapDataset is filled in this cutout geometry
dataset = maker.run(stacked, obs)
# The data quality cut is applied
dataset = maker_safe_mask.run(dataset, obs)
# fit background model
dataset = maker_fov.run(dataset)
#print(
# f"Background norm obs {obs.obs_id}: {dataset.background_model.spectral_model.norm.value:.2f} +/- {dataset.background_model.spectral_model.norm.error:.2f}"
#)
# The resulting dataset cutout is stacked onto the final one
stacked.stack(dataset)

# Spectral seed parameters from the config: [normalization (TeV^-1 cm^-2 s^-1), index]
norm, idx = config["spectrum"]["params"]
spatial_model = GaussianSpatialModel(
lon_0=source_pos.ra, lat_0=source_pos.dec, sigma=config["sky_map"]["theta"]*u.deg, frame="icrs"
)

spectral_model = PowerLawSpectralModel(
index=idx,
amplitude=norm * u.Unit("1 / (cm2 s TeV)"),
reference=1 * u.TeV,
)
sky_model = SkyModel(
spatial_model=spatial_model, spectral_model=spectral_model, name="src"
)
bkg_model = FoVBackgroundModel(dataset_name="stacked")
stacked.models = [sky_model, bkg_model]
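# Joint fit of the source model and the stacked FoV background normalisation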
fit = Fit(optimize_opts={"print_level": 0})
result = fit.run([stacked])

spec = sky_model.spectral_model
energy_bounds = [e_min,e_max] * u.TeV

# Estimate flux points over the configured energy range from the stacked dataset
fpe = FluxPointsEstimator(
energy_edges=np.logspace(np.log10(e_min), np.log10(e_max), nbins) * u.TeV,
source="src",
n_sigma_ul=2,
selection_optional="all",
)
flux_points = fpe.run(datasets=[stacked])

if plot:
flux_points.plot(energy_power=2)
spec.plot(energy_bounds=energy_bounds, energy_power=2)
spec.plot_error(energy_bounds=energy_bounds, energy_power=2)
plt.show()

return flux_points,spec
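A short usage sketch for the new function, assuming config is the parsed YAML from the example above; inspecting the outputs this way is illustrative:

# Run the FoV-background spectral analysis (plot=False for non-interactive use)
flux_points, spec = make_spectrum_fov(config, plot=False)

# Inspect the best-fit spectral parameters and the flux points
print(spec)
print(flux_points.to_table(sed_type="dnde"))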