From ffca60ac9587b1a90e55bca2ddb5830e0e445038 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 10:17:36 -0400 Subject: [PATCH 1/8] fix(gammapy_tools/make_background/prepare_data.py): adding overwrite option adding option to overwrite the output directory when preparing datasets --- gammapy_tools/make_background/prepare_data.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/gammapy_tools/make_background/prepare_data.py b/gammapy_tools/make_background/prepare_data.py index f370025..4e62f0c 100644 --- a/gammapy_tools/make_background/prepare_data.py +++ b/gammapy_tools/make_background/prepare_data.py @@ -6,14 +6,15 @@ from gammapy.data import DataStore -def prepare_dataset(config: dict) -> dict: +def prepare_dataset(config: dict, overwrite: bool = False) -> dict: """Prepare a dataset for analysis. Extract runs of interest from an existing datastore, reporting any missing runs. Parameters ---------- config (dict) - dictionary with config information - + overwrite (bool) - bool to control whether or not to overwrite + the output directory Returns ---------- config (dict) - dictionary with config information @@ -51,7 +52,7 @@ def prepare_dataset(config: dict) -> dict: # Copy try: - data_store.copy_obs(obs_in_db, in_dir) + data_store.copy_obs(obs_in_db, in_dir, overwrite=overwrite) except Exception as e: if len(obs_in_db) == 0: raise RuntimeError( From 41b04df1c83d84fd37644211d081a77b8b0c49f0 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:11:27 -0400 Subject: [PATCH 2/8] fix(pyproject.toml): fixing versioning --- .cz.toml | 7 ------- gammapy_tools/__init__.py | 4 +++- gammapy_tools/_version.py | 3 ++- pyproject.toml | 24 ++++++++---------------- 4 files changed, 13 insertions(+), 25 deletions(-) delete mode 100644 .cz.toml diff --git a/.cz.toml b/.cz.toml deleted file mode 100644 index 4b4c90d..0000000 --- a/.cz.toml +++ /dev/null @@ -1,7 +0,0 @@ -[tool.commitizen] -name = "cz_conventional_commits" -tag_format = "$version" -version_scheme = "semver" -version = "0.1.1" -update_changelog_on_bump = true -major_version_zero = true diff --git a/gammapy_tools/__init__.py b/gammapy_tools/__init__.py index c73820d..68eb37c 100644 --- a/gammapy_tools/__init__.py +++ b/gammapy_tools/__init__.py @@ -2,10 +2,12 @@ from . import make_background from . import templates from . import fake_source_coordinates - +from ._version import __version__ __all__ = ( analysis.__all__ + make_background.__all__ + templates.__all__ + fake_source_coordinates.__all__ + + ["__version__"] ) + diff --git a/gammapy_tools/_version.py b/gammapy_tools/_version.py index 8f32dc2..7a349da 100644 --- a/gammapy_tools/_version.py +++ b/gammapy_tools/_version.py @@ -3,6 +3,7 @@ TYPE_CHECKING = False if TYPE_CHECKING: from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] 
else: VERSION_TUPLE = object @@ -13,4 +14,4 @@ version_tuple: VERSION_TUPLE __version__ = version = "0.1.3.dev82+gf90456e.d20240402" -__version_tuple__ = version_tuple = (0, 1, 3, "dev82", "gf90456e.d20240402") \ No newline at end of file +__version_tuple__ = version_tuple = (0, 1, 3, "dev82", "gf90456e.d20240402") diff --git a/pyproject.toml b/pyproject.toml index 481539d..d7d1eee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,29 +13,17 @@ packages=[ ] [tool.setuptools_scm] -write_to = "gammapy_tools/_version.py" +write_to = "gammapy_tools/__version__.py" [project] name = "gammapy_tools" dynamic = ["version"] -description = "VERITAS (VEGAS and Eventdisplay) to DL3 Converter" +description = "Tools for gammapy analysis for the VERITAS telescope" readme = "README.md" license = { file="LICENSE" } requires-python = ">=3.9" -#dependencies = [ -# "gammapy==1.2", -# "pydantic<=1.10", -# "scipy==1.11.4", -# "astropy==5.3.4", -# "astroquery==0.4.6", -# "matplotlib==3.8.2", -# "multiprocess==0.70.16", -# "numpy==1.26.4", -# #"pyV2DL3==0.5", -# "PyYAML==6.0.1", -# "regions==0.8", -#] + dependencies = [ "gammapy>=1.1", @@ -65,4 +53,8 @@ dependencies = [ [project.scripts] [tool.commitizen] -version = "0.1.3" # This should be your current semver version \ No newline at end of file +name = "cz_conventional_commits" +tag_format = "$version" +version_scheme = "semver" +version_provider = "scm" +update_changelog_on_bump = true From 8d0397b199557cd4b4ce986518bb1bac3ef11b03 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:13:38 -0400 Subject: [PATCH 3/8] refactor(__version__): switching to __version__ for the version number using __version__ instead of _version --- gammapy_tools/__init__.py | 2 +- gammapy_tools/{_version.py => __version__.py} | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) rename gammapy_tools/{_version.py => __version__.py} (71%) diff --git a/gammapy_tools/__init__.py b/gammapy_tools/__init__.py index 68eb37c..e580542 100644 --- a/gammapy_tools/__init__.py +++ b/gammapy_tools/__init__.py @@ -2,7 +2,7 @@ from . import make_background from . import templates from . import fake_source_coordinates -from ._version import __version__ +from .__version__ import __version__ __all__ = ( analysis.__all__ + make_background.__all__ diff --git a/gammapy_tools/_version.py b/gammapy_tools/__version__.py similarity index 71% rename from gammapy_tools/_version.py rename to gammapy_tools/__version__.py index 7a349da..95b0ef0 100644 --- a/gammapy_tools/_version.py +++ b/gammapy_tools/__version__.py @@ -3,7 +3,6 @@ TYPE_CHECKING = False if TYPE_CHECKING: from typing import Tuple, Union - VERSION_TUPLE = Tuple[Union[int, str], ...] 
else: VERSION_TUPLE = object @@ -13,5 +12,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = "0.1.3.dev82+gf90456e.d20240402" -__version_tuple__ = version_tuple = (0, 1, 3, "dev82", "gf90456e.d20240402") +__version__ = version = '1.0.1.dev2+g4d0432e.d20240509' +__version_tuple__ = version_tuple = (1, 0, 1, 'dev2', 'g4d0432e.d20240509') From e85b80f4974eaaefffce7997b82e7fbc4a6154f1 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:16:42 -0400 Subject: [PATCH 4/8] refactor(gammapy_tools/__version__.py): post test update --- gammapy_tools/__version__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gammapy_tools/__version__.py b/gammapy_tools/__version__.py index 95b0ef0..8f83c55 100644 --- a/gammapy_tools/__version__.py +++ b/gammapy_tools/__version__.py @@ -12,5 +12,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '1.0.1.dev2+g4d0432e.d20240509' -__version_tuple__ = version_tuple = (1, 0, 1, 'dev2', 'g4d0432e.d20240509') +__version__ = version = '1.0.1.dev4+g8d0397b.d20240509' +__version_tuple__ = version_tuple = (1, 0, 1, 'dev4', 'g8d0397b.d20240509') From 0ff4a86af33495f19309b21300252f1cbf5af549 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:22:23 -0400 Subject: [PATCH 5/8] build(.github/workflows/bump_version.yml): re-enabling bump_version github action #10 --- .github/workflows/bump_version.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/bump_version.yml b/.github/workflows/bump_version.yml index 5bba4d2..5f6b4be 100644 --- a/.github/workflows/bump_version.yml +++ b/.github/workflows/bump_version.yml @@ -4,7 +4,8 @@ on: push: branches: # disabling it for now - - _main + - main + - *_rc jobs: bump_version: From a4188f2b36411ac88981ab861f3921dfd86897e3 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:28:58 -0400 Subject: [PATCH 6/8] build(pyproject.toml): adding versions --- pyproject.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index d7d1eee..25d8473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,3 +58,8 @@ tag_format = "$version" version_scheme = "semver" version_provider = "scm" update_changelog_on_bump = true +changelog_incremental = true +version_files = [ + "__version__.py", + "pyproject.toml:version" +] From 1f1384e71bf9122c27c0c3b0cc97611c4de24fd8 Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 11:32:05 -0400 Subject: [PATCH 7/8] =?UTF-8?q?bump:=20version=201.0.0=20=E2=86=92=201.0.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 51 +++++-- gammapy_tools/analysis/data_products.py | 130 +++++++++++------- .../analysis/data_products_LOCAL_4133777.py | 66 +++++---- .../analysis/data_products_LOCAL_4133860.py | 66 +++++---- gammapy_tools/analysis/rbm.py | 78 ++++++----- .../make_background/make_background.py | 22 ++- gammapy_tools/utils/run_details.py | 5 +- pyproject.toml | 2 +- 8 files changed, 268 insertions(+), 152 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2ee11ac..9f21ee0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,31 +1,64 @@ -## Unreleased - -### Feat - -- **background_models**: adding a user defined smoothing sigma +## 1.0.1 (2024-05-09) ### Fix -- **gammapy_tools/analysis/rbm.py**: removing hard coded map size values +- **pyproject.toml**: fixing versioning +- **gammapy_tools/make_background/prepare_data.py**: 
adding overwrite option ### Refactor -- **analysis_notebook**: updating example -- **data_products**: removing plt.show +- **gammapy_tools/__version__.py**: post test update +- **__version__**: switching to __version__ for the version number + +## 1.0.0 (2024-05-07) -## 0.1.3 (2024-02-07) +### Feat + +- **utils/exclusion_finder.py**: adding exclusion finder +- **gammapy_tools/fake_source_coordinates/process.py**: adding mimic_data +- **make_background**: parallel reduction of background and mimic search +- **gammapy_tools/make_background**: implementing a parallel + reduction method to spead up background generation +- **make_background**: implementation of closed N background runs +- **background_models**: adding a user defined smoothing sigma ### Fix +- **pyproject.toml**: changing gammapy version +- **gammapy_tools/make_background/background_models.py**: updating to gammapy1.2 +- **gammapy_tools/utils/exclusion_finder.py**: adding a check for gammacat and hawc +- **gammapy_tools/utils/exclusion_finder.py**: correcting path +- **gammapy_tools/utils/exclusion_finder.py**: correcting imports and file paths +- **fake_source_coordinates/process.py**: adding safety check +- **gammapy_tools/fake_source_coordinates/process.py**: fixing print and removing target +- **process.py**: correcting background +- **make_background**: storing kl_div table +- **background_tools.py**: exposure searching +- **prepare_data.py**: adding more info +- **prepare_data.py**: raise error when no runs are found +- **gammapy_tools/analysis/rbm.py**: removing hard coded map size values +- **Hipparcos_MAG8_1997.dat**: updated hipparcos file to have correct colour column - **analysis**: update the analysis scripts to work with config files ### Refactor +- **gammapy_tools/fake_source_coordinates/process.py**: adding more robust file finder +- **fake_source_coordinates/process.py**: reincluding source when scrambling +- **gammapy_tools/fake_source_coordinates/fake_location.py**: popping exisiting background +- **make_background**: removing ignore warnings +- **make_background.py**: removing additional background and allow previous method +- **templates/config.py**: adding KL_DIV +- **utils**: moving functions to utils +- **analysis_notebook**: updating example +- **data_products**: removing plt.show - **_version.**: bumping version - **analysis_notebook**: migrating analysis notebook - **fake_source_coordinates**: adding __all__ - **pyproject.toml**: changing required to minimum python version +### Perf + +- **gammapy_tools/utils/run_details.py**: Changed the mimic selection to perform a nested search + ## 0.1.2 (2024-01-30) ### Fix diff --git a/gammapy_tools/analysis/data_products.py b/gammapy_tools/analysis/data_products.py index 6fdd9dd..fbd4fc8 100644 --- a/gammapy_tools/analysis/data_products.py +++ b/gammapy_tools/analysis/data_products.py @@ -1,4 +1,3 @@ -from pathlib import Path import numpy as np import astropy.units as u from astropy.coordinates import Angle, SkyCoord @@ -11,7 +10,6 @@ # %matplotlib inline import matplotlib.pyplot as plt -from IPython.display import display # gammapy imports from gammapy.data import DataStore @@ -35,11 +33,8 @@ SkyModel, ) from gammapy.estimators import LightCurveEstimator -from gammapy.analysis import Analysis, AnalysisConfig -from gammapy.catalog import SourceCatalogGammaCat, SourceCatalog3HWC from gammapy.visualization import plot_spectrum_datasets_off_regions -from gammapy_tools.utils import exclusion_finder def make_spectrum_RE(config, plot=True, return_stacked=False): 
"""Make a RE spectrum @@ -69,8 +64,12 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): observations = datastore.get_observations() if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -135,8 +134,10 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): ) < 2.0 ) - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] - + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] + # append stars to exclusion list for src in star_cat[star_mask]: exclusion_regions.append( @@ -180,13 +181,15 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): info_table = datasets.info_table(cumulative=True) time = info_table["livetime"].to("h") sig = info_table["sqrt_ts"] - - #plot exclusion regions and reflected regions - if len(exclusion_regions) > 0 and plot: - plt.figure(figsize=(8,8)) + + # plot exclusion regions and reflected regions + if len(exclusion_regions) > 0 and plot: + plt.figure(figsize=(8, 8)) ax = exclusion_mask.plot() - on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta",label='ON') - plot_spectrum_datasets_off_regions(ax=ax, datasets=datasets) #add legend=True to plot run numbers associated w/ OFF regions + on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta", label="ON") + plot_spectrum_datasets_off_regions( + ax=ax, datasets=datasets + ) # add legend=True to plot run numbers associated w/ OFF regions plt.show() # make spectrum model from user input @@ -221,7 +224,10 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): energy_edges = np.geomspace(e_min, e_max, e_bins) * u.TeV fpe = FluxPointsEstimator( - energy_edges=energy_edges, source="my_source", selection_optional="all",n_sigma_ul=2 + energy_edges=energy_edges, + source="my_source", + selection_optional="all", + n_sigma_ul=2, ) flux_points = fpe.run(datasets=datasets) @@ -239,9 +245,10 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): return flux_points, result_joint.models, time, sig -def get_integral_flux(config,spectral_model): + +def get_integral_flux(config, spectral_model): """Outputs integral flux calculated from a spectral model - + Parameters ---------- config: configuration file @@ -251,8 +258,12 @@ def get_integral_flux(config,spectral_model): flux_points: flux points object """ if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -306,8 +317,10 @@ def get_integral_flux(config,spectral_model): ) < 2.0 ) - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] - + 
star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] + # append stars to exclusion list for src in star_cat[star_mask]: exclusion_regions.append( @@ -336,7 +349,7 @@ def get_integral_flux(config,spectral_model): exclusion_mask.plot() if plot: plt.show() - + # create reflected regions background bkg_maker = ReflectedRegionsBackgroundMaker(exclusion_mask=exclusion_mask) safe_mask_masker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10) @@ -350,23 +363,23 @@ def get_integral_flux(config,spectral_model): # construct spectral model spectral_model = PowerLawSpectralModel( - amplitude=float(norm) * u.Unit("cm-2 s-1 TeV-1"), - index=float(index), - reference=1 * u.TeV, - ) - + amplitude=float(norm) * u.Unit("cm-2 s-1 TeV-1"), + index=float(index), + reference=1 * u.TeV, + ) model = SkyModel(spectral_model=spectral_model, name="my_source") datasets.models = [model] fit_joint = Fit() result_joint = fit_joint.run(datasets=datasets) - - # get flux by integrating spectral model - flux = result_join.models.integral(threshold*u.TeV, 30*u.TeV).value - flux_err = result_join.models.integral_error(threshold*u.TeV, 30*u.TeV)[1].value + + # get flux by integrating spectral model + flux = result_join.models.integral(threshold * u.TeV, 30 * u.TeV).value + flux_err = result_join.models.integral_error(threshold * u.TeV, 30 * u.TeV)[1].value return flux, flux_err + def get_flux_lc(config, type="flux"): """Output run-wise flux points and the overall flux of a 1D dataset @@ -383,17 +396,24 @@ def get_flux_lc(config, type="flux"): """ theta = config["sky_map"]["theta"] datastore = DataStore.from_dir(config["io"]["out_dir"]) - + if config["io"]["from_runlist"]: - observations = datastore.get_observations(obs_id=np.genfromtxt(config["io"]["runlist"],unpack=True),required_irf="all-optional") + observations = datastore.get_observations( + obs_id=np.genfromtxt(config["io"]["runlist"], unpack=True), + required_irf="all-optional", + ) else: observations = datastore.get_observations(required_irf="all-optional") - + amp, idx = config["lightcurve"]["params"] - + if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -401,9 +421,9 @@ def get_flux_lc(config, type="flux"): frame="icrs", unit="deg", ) - + e_min = config["spectrum"]["e_min"] - e_max = 100 #config["spectrum"]["e_max"] + e_max = 100 # config["spectrum"]["e_max"] nbin = config["spectrum"]["e_bins"] selection = dict( @@ -426,8 +446,12 @@ def get_flux_lc(config, type="flux"): # exclusion regions exclusion_regions = [] - exclusion_regions.append(CircleSkyRegion(center=source_pos,radius=config["sky_map"]["on_exclusion_region"]*u.deg)) - + exclusion_regions.append( + CircleSkyRegion( + center=source_pos, radius=config["sky_map"]["on_exclusion_region"] * u.deg + ) + ) + if ( len(config["sky_map"]["exclusion_regions"]) > 0 ): # should be a list of CircleSkyRegions @@ -464,7 +488,7 @@ def get_flux_lc(config, type="flux"): "dec": star_data[:, 1], "id": star_data[:, 2], "mag": star_data[:, 3], - "colour": star_data[:,4], + "colour": star_data[:, 4], 
} ) star_mask = ( @@ -475,8 +499,10 @@ def get_flux_lc(config, type="flux"): < 2.0 ) - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] - + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] + for src in star_cat[star_mask]: exclusion_regions.append( CircleSkyRegion( @@ -485,7 +511,7 @@ def get_flux_lc(config, type="flux"): ) ) - #create exclusion mask + # create exclusion mask geom = WcsGeom.create( npix=(150, 150), binsz=0.05, skydir=source_pos, proj="TAN", frame="icrs" ) @@ -511,11 +537,11 @@ def get_flux_lc(config, type="flux"): if type == "flux": time_intervals = [Time([start, stop])] lc_maker_1d = LightCurveEstimator( - energy_edges=[e_min,e_max] * u.TeV, + energy_edges=[e_min, e_max] * u.TeV, time_intervals=time_intervals, n_sigma_ul=2, reoptimize=False, - selection_optional='all' + selection_optional="all", ) short_observations = observations.select_time(time_intervals) @@ -537,9 +563,7 @@ def get_flux_lc(config, type="flux"): if type == "runwise": lc_maker_1d = LightCurveEstimator( - energy_edges=[e_min, e_max] * u.TeV, - selection_optional=None, - n_sigma_ul=2 + energy_edges=[e_min, e_max] * u.TeV, selection_optional=None, n_sigma_ul=2 ) short_observations = observations @@ -556,7 +580,7 @@ def get_flux_lc(config, type="flux"): spectral_model = PowerLawSpectralModel( index=float(idx), amplitude=float(amp) * u.Unit("1 / (cm2 s TeV)"), - reference=1 * u.TeV + reference=1 * u.TeV, ) sky_model = SkyModel( spatial_model=None, spectral_model=spectral_model, name="model" @@ -564,7 +588,7 @@ def get_flux_lc(config, type="flux"): sky_model.parameters["index"].frozen = True sky_model.parameters["reference"].frozen = True - + datasets.models = sky_model lc_1d = lc_maker_1d.run(datasets=datasets) diff --git a/gammapy_tools/analysis/data_products_LOCAL_4133777.py b/gammapy_tools/analysis/data_products_LOCAL_4133777.py index 7b8f23a..ccf53a4 100644 --- a/gammapy_tools/analysis/data_products_LOCAL_4133777.py +++ b/gammapy_tools/analysis/data_products_LOCAL_4133777.py @@ -1,4 +1,3 @@ -from pathlib import Path import numpy as np import astropy.units as u from astropy.coordinates import Angle, SkyCoord @@ -11,7 +10,6 @@ # %matplotlib inline import matplotlib.pyplot as plt -from IPython.display import display # gammapy imports from gammapy.data import DataStore @@ -35,10 +33,9 @@ SkyModel, ) from gammapy.estimators import LightCurveEstimator -from gammapy.analysis import Analysis, AnalysisConfig -from gammapy.catalog import SourceCatalogGammaCat, SourceCatalog3HWC from gammapy.visualization import plot_spectrum_datasets_off_regions + def make_spectrum_RE(config, plot=True, return_stacked=False): """Make a RE spectrum @@ -67,8 +64,12 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): observations = datastore.get_observations() if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -148,7 +149,9 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): < 2.0 ) # star_mask &= (star_cat["mag"] + 
star_cat["colour"]) < 8 - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] for src in star_cat[star_mask]: exclusion_regions.append( @@ -192,13 +195,15 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): info_table = datasets.info_table(cumulative=True) time = info_table["livetime"].to("h") sig = info_table["sqrt_ts"] - - #plot exclusion regions and reflected regions - if len(exclusion_regions) > 0 and plot: - plt.figure(figsize=(8,8)) + + # plot exclusion regions and reflected regions + if len(exclusion_regions) > 0 and plot: + plt.figure(figsize=(8, 8)) ax = exclusion_mask.plot() - on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta",label='ON') - plot_spectrum_datasets_off_regions(ax=ax, datasets=datasets) #add legend=True to plot run numbers associated w/ OFF regions + on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta", label="ON") + plot_spectrum_datasets_off_regions( + ax=ax, datasets=datasets + ) # add legend=True to plot run numbers associated w/ OFF regions plt.show() # make spectrum model from user input @@ -272,17 +277,24 @@ def get_flux_lc(config, type="flux"): theta = config["sky_map"]["theta"] datastore = DataStore.from_dir(config["io"]["out_dir"]) - + if config["io"]["from_runlist"]: - observations = datastore.get_observations(obs_id=np.genfromtxt(config["io"]["runlist"],unpack=True),required_irf="all-optional") + observations = datastore.get_observations( + obs_id=np.genfromtxt(config["io"]["runlist"], unpack=True), + required_irf="all-optional", + ) else: observations = datastore.get_observations(required_irf="all-optional") - + amp, idx = config["spectrum"]["params"] - + if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -290,7 +302,7 @@ def get_flux_lc(config, type="flux"): frame="icrs", unit="deg", ) - + e_min = config["spectrum"]["e_min"] e_max = config["spectrum"]["e_max"] nbin = config["spectrum"]["e_bins"] @@ -315,13 +327,13 @@ def get_flux_lc(config, type="flux"): # exclusion regions exclusion_regions = [] - + exclusion_regions.append( CircleSkyRegion( center=source_pos, radius=config["sky_map"]["on_exclusion_region"] * u.deg ) ) - + if ( len(config["sky_map"]["exclusion_regions"]) > 0 ): # should be a list of CircleSkyRegions @@ -358,7 +370,7 @@ def get_flux_lc(config, type="flux"): "dec": star_data[:, 1], "id": star_data[:, 2], "mag": star_data[:, 3], - "colour": star_data[:,4], + "colour": star_data[:, 4], } ) star_mask = ( @@ -369,8 +381,10 @@ def get_flux_lc(config, type="flux"): < 2.0 ) - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] - + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] + for src in star_cat[star_mask]: exclusion_regions.append( CircleSkyRegion( @@ -404,7 +418,7 @@ def get_flux_lc(config, type="flux"): if type == "flux": time_intervals = [Time([start, stop])] lc_maker_1d = LightCurveEstimator( - 
energy_edges=[e_min,e_max] * u.TeV, + energy_edges=[e_min, e_max] * u.TeV, time_intervals=time_intervals, n_sigma_ul=2, reoptimize=False, diff --git a/gammapy_tools/analysis/data_products_LOCAL_4133860.py b/gammapy_tools/analysis/data_products_LOCAL_4133860.py index 7b8f23a..ccf53a4 100644 --- a/gammapy_tools/analysis/data_products_LOCAL_4133860.py +++ b/gammapy_tools/analysis/data_products_LOCAL_4133860.py @@ -1,4 +1,3 @@ -from pathlib import Path import numpy as np import astropy.units as u from astropy.coordinates import Angle, SkyCoord @@ -11,7 +10,6 @@ # %matplotlib inline import matplotlib.pyplot as plt -from IPython.display import display # gammapy imports from gammapy.data import DataStore @@ -35,10 +33,9 @@ SkyModel, ) from gammapy.estimators import LightCurveEstimator -from gammapy.analysis import Analysis, AnalysisConfig -from gammapy.catalog import SourceCatalogGammaCat, SourceCatalog3HWC from gammapy.visualization import plot_spectrum_datasets_off_regions + def make_spectrum_RE(config, plot=True, return_stacked=False): """Make a RE spectrum @@ -67,8 +64,12 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): observations = datastore.get_observations() if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -148,7 +149,9 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): < 2.0 ) # star_mask &= (star_cat["mag"] + star_cat["colour"]) < 8 - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] for src in star_cat[star_mask]: exclusion_regions.append( @@ -192,13 +195,15 @@ def make_spectrum_RE(config, plot=True, return_stacked=False): info_table = datasets.info_table(cumulative=True) time = info_table["livetime"].to("h") sig = info_table["sqrt_ts"] - - #plot exclusion regions and reflected regions - if len(exclusion_regions) > 0 and plot: - plt.figure(figsize=(8,8)) + + # plot exclusion regions and reflected regions + if len(exclusion_regions) > 0 and plot: + plt.figure(figsize=(8, 8)) ax = exclusion_mask.plot() - on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta",label='ON') - plot_spectrum_datasets_off_regions(ax=ax, datasets=datasets) #add legend=True to plot run numbers associated w/ OFF regions + on_region.to_pixel(ax.wcs).plot(ax=ax, edgecolor="magenta", label="ON") + plot_spectrum_datasets_off_regions( + ax=ax, datasets=datasets + ) # add legend=True to plot run numbers associated w/ OFF regions plt.show() # make spectrum model from user input @@ -272,17 +277,24 @@ def get_flux_lc(config, type="flux"): theta = config["sky_map"]["theta"] datastore = DataStore.from_dir(config["io"]["out_dir"]) - + if config["io"]["from_runlist"]: - observations = datastore.get_observations(obs_id=np.genfromtxt(config["io"]["runlist"],unpack=True),required_irf="all-optional") + observations = datastore.get_observations( + obs_id=np.genfromtxt(config["io"]["runlist"], unpack=True), + required_irf="all-optional", + ) else: observations = 
datastore.get_observations(required_irf="all-optional") - + amp, idx = config["spectrum"]["params"] - + if config["run_selection"]["pos_from_DL3"]: # get position from DL3 header - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) + hdul = fits.open( + config["io"]["out_dir"] + os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) else: # get position from ra/dec [deg] source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -290,7 +302,7 @@ def get_flux_lc(config, type="flux"): frame="icrs", unit="deg", ) - + e_min = config["spectrum"]["e_min"] e_max = config["spectrum"]["e_max"] nbin = config["spectrum"]["e_bins"] @@ -315,13 +327,13 @@ def get_flux_lc(config, type="flux"): # exclusion regions exclusion_regions = [] - + exclusion_regions.append( CircleSkyRegion( center=source_pos, radius=config["sky_map"]["on_exclusion_region"] * u.deg ) ) - + if ( len(config["sky_map"]["exclusion_regions"]) > 0 ): # should be a list of CircleSkyRegions @@ -358,7 +370,7 @@ def get_flux_lc(config, type="flux"): "dec": star_data[:, 1], "id": star_data[:, 2], "mag": star_data[:, 3], - "colour": star_data[:,4], + "colour": star_data[:, 4], } ) star_mask = ( @@ -369,8 +381,10 @@ def get_flux_lc(config, type="flux"): < 2.0 ) - star_mask &= (star_cat["mag"]+star_cat["colour"])< config["sky_map"]["min_star_brightness"] - + star_mask &= (star_cat["mag"] + star_cat["colour"]) < config["sky_map"][ + "min_star_brightness" + ] + for src in star_cat[star_mask]: exclusion_regions.append( CircleSkyRegion( @@ -404,7 +418,7 @@ def get_flux_lc(config, type="flux"): if type == "flux": time_intervals = [Time([start, stop])] lc_maker_1d = LightCurveEstimator( - energy_edges=[e_min,e_max] * u.TeV, + energy_edges=[e_min, e_max] * u.TeV, time_intervals=time_intervals, n_sigma_ul=2, reoptimize=False, diff --git a/gammapy_tools/analysis/rbm.py b/gammapy_tools/analysis/rbm.py index f3fde87..90d9327 100644 --- a/gammapy_tools/analysis/rbm.py +++ b/gammapy_tools/analysis/rbm.py @@ -2,7 +2,6 @@ import yaml import numpy as np from scipy.stats import norm -from IPython.display import display import os from astropy.io import fits from os import environ @@ -19,11 +18,9 @@ from gammapy.datasets import MapDatasetOnOff from gammapy.estimators import ExcessMapEstimator from gammapy.makers import RingBackgroundMaker -from gammapy.data import DataStore from gammapy.modeling.models import PowerLawSpectralModel -from astroquery.simbad import Simbad log = logging.getLogger(__name__) @@ -55,16 +52,22 @@ def rbm_analysis(config): source_config = AnalysisConfig() source_config.datasets.type = "3d" source_config.observations.datastore = data_store - - #select only observations from runlist, if specified + + # select only observations from runlist, if specified if config["io"]["from_runlist"]: - source_config.observations.obs_ids = np.genfromtxt(config["io"]["runlist"],unpack=True).tolist() - + source_config.observations.obs_ids = np.genfromtxt( + config["io"]["runlist"], unpack=True + ).tolist() + if config["run_selection"]["pos_from_DL3"]: - #get RA and DEC from first run - hdul = fits.open(config["io"]["out_dir"]+os.listdir(config["io"]["out_dir"])[0]) - source_pos = SkyCoord(hdul[1].header["RA_OBJ"]*u.deg, hdul[1].header["DEC_OBJ"]*u.deg) - + # get RA and DEC from first run + hdul = fits.open( + config["io"]["out_dir"] + 
os.listdir(config["io"]["out_dir"])[0] + ) + source_pos = SkyCoord( + hdul[1].header["RA_OBJ"] * u.deg, hdul[1].header["DEC_OBJ"] * u.deg + ) + else: source_pos = SkyCoord( config["run_selection"]["source_ra"], @@ -72,7 +75,7 @@ def rbm_analysis(config): frame="icrs", unit="deg", ) - + source_config.datasets.geom.wcs.skydir = { "lon": source_pos.ra, "lat": source_pos.dec, @@ -92,17 +95,26 @@ def rbm_analysis(config): source_config.datasets.geom.selection.offset_max = map_deg * u.deg # We now fix the energy axis for the counts map - (the reconstructed energy binning) - source_config.datasets.geom.axes.energy.min = str(config["sky_map"]["e_min"])+" TeV" - source_config.datasets.geom.axes.energy.max = str(config["sky_map"]["e_max"])+" TeV" + source_config.datasets.geom.axes.energy.min = ( + str(config["sky_map"]["e_min"]) + " TeV" + ) + source_config.datasets.geom.axes.energy.max = ( + str(config["sky_map"]["e_max"]) + " TeV" + ) source_config.datasets.geom.axes.energy.nbins = 30 - source_config.excess_map.correlation_radius = str(config["sky_map"]["theta"]) + " deg" - + source_config.excess_map.correlation_radius = ( + str(config["sky_map"]["theta"]) + " deg" + ) + # We need to extract the ring for each observation separately, hence, no stacking at this stage source_config.datasets.stack = False - source_config.datasets.safe_mask.parameters = {'aeff_percent':config["sky_map"]["aeff_max_percent"], 'offset_max':config["sky_map"]["offset_max"]*u.deg} - source_config.datasets.safe_mask.methods = ['aeff-max','offset-max'] + source_config.datasets.safe_mask.parameters = { + "aeff_percent": config["sky_map"]["aeff_max_percent"], + "offset_max": config["sky_map"]["offset_max"] * u.deg, + } + source_config.datasets.safe_mask.methods = ["aeff-max", "offset-max"] analysis = Analysis(source_config) @@ -113,14 +125,14 @@ def rbm_analysis(config): analysis.get_observations() analysis.get_datasets() - #simbad = Simbad() - #simbad.reset_votable_fields() - #simbad.add_votable_fields("ra", "dec", "flux(B)", "flux(V)", "jp11") - #simbad.remove_votable_fields("coordinates") + # simbad = Simbad() + # simbad.reset_votable_fields() + # simbad.add_votable_fields("ra", "dec", "flux(B)", "flux(V)", "jp11") + # simbad.remove_votable_fields("coordinates") - #srcs_tab = simbad.query_region(source_pos, radius=1.5 * u.deg) - #srcs_tab = srcs_tab[srcs_tab["FLUX_B"] < config["sky_map"]["min_star_brightness"]] - #srcs_tab = srcs_tab[srcs_tab["FLUX_V"] != np.ma.masked] + # srcs_tab = simbad.query_region(source_pos, radius=1.5 * u.deg) + # srcs_tab = srcs_tab[srcs_tab["FLUX_B"] < config["sky_map"]["min_star_brightness"]] + # srcs_tab = srcs_tab[srcs_tab["FLUX_V"] != np.ma.masked] # get the geom that we use geom = analysis.datasets[0].counts.geom @@ -142,7 +154,7 @@ def rbm_analysis(config): radius=radius * u.deg, ) ) - + star_data = np.loadtxt( # environ["GAMMAPY_DATA"] + "/catalogs/Hipparcos_MAG8_1997.dat", usecols=(0, 1, 2, 3, 4) environ["GAMMAPY_DATA"] + "/catalogs/Hipparcos_MAG8_1997.dat", @@ -226,8 +238,8 @@ def rbm_analysis(config): sigma = output_dict["sqrt_ts"] exposure = output_dict["ontime"] - #significance_map_off = significance_map * exclusion_mask - #significance_map_off = significance_map[exclusion_mask] + # significance_map_off = significance_map * exclusion_mask + # significance_map_off = significance_map[exclusion_mask] return ( counts, @@ -285,7 +297,7 @@ def rbm_plots( config["plot_names"] + "sig_excess.png", format="png", bbox_inches="tight" ) plt.show() - + significance_map_off = significance_map * 
exclusion_mask # significance distribution @@ -299,7 +311,7 @@ def rbm_plots( alpha=0.5, color="red", label="all bins", - bins=np.linspace(-5,10,50), + bins=np.linspace(-5, 10, 50), ) ax.hist( @@ -308,7 +320,7 @@ def rbm_plots( alpha=0.5, color="blue", label="off bins", - bins=np.linspace(-5,10,50), + bins=np.linspace(-5, 10, 50), ) # Now, fit the off distribution with a Gaussian @@ -363,12 +375,12 @@ def rbm_plots( def write_validation_info( - config, spectral_model, flux, flux_err, counts, background, alpha, sigma, exposure + config, spectral_model, flux, flux_err, counts, background, alpha, sigma, exposure ): if not os.path.exists(config["io"]["results_dir"]): os.makedirs(config["io"]["results_dir"]) - + spectab = spectral_model.to_parameters_table() index = spectab["value"][0] index_err = spectab["error"][0] diff --git a/gammapy_tools/make_background/make_background.py b/gammapy_tools/make_background/make_background.py index 798b41b..d621c65 100644 --- a/gammapy_tools/make_background/make_background.py +++ b/gammapy_tools/make_background/make_background.py @@ -5,7 +5,8 @@ import numpy as np import yaml from os import listdir -from os.path import isfile, join, getsize +from os.path import isfile, join + # Astropy from astropy.io import fits from gammapy.data import DataStore @@ -418,12 +419,27 @@ def run_make_background(config: dict) -> dict: return config + def write_index_files(config): dl3_dir = config["io"]["out_dir"] - dl3Files = [dl3_dir + f for f in listdir(dl3_dir) if isfile(join(dl3_dir, f)) and (f.endswith(".fits") or (f.endswith(".fits.gz") and not f.startswith("obs") and not f.startswith("hdu"))) and (f.strip('.anasum.fits') )] - create_obs_hdu_index_file(dl3Files,index_file_dir=dl3_dir) + dl3Files = [ + dl3_dir + f + for f in listdir(dl3_dir) + if isfile(join(dl3_dir, f)) + and ( + f.endswith(".fits") + or ( + f.endswith(".fits.gz") + and not f.startswith("obs") + and not f.startswith("hdu") + ) + ) + and (f.strip(".anasum.fits")) + ] + create_obs_hdu_index_file(dl3Files, index_file_dir=dl3_dir) return + if __name__ == "__main__": with open(sys.argv[1], "r") as f: config = yaml.safe_load(f) diff --git a/gammapy_tools/utils/run_details.py b/gammapy_tools/utils/run_details.py index 9c8da53..1107765 100644 --- a/gammapy_tools/utils/run_details.py +++ b/gammapy_tools/utils/run_details.py @@ -141,7 +141,10 @@ def find_data_mimic( # mask in time if config["background_selection"]["same_szn"]: - sub_table = obs_table[(obs_date > list(season.values())[0]) & (obs_date < list(season.values())[1])] + sub_table = obs_table[ + (obs_date > list(season.values())[0]) + & (obs_date < list(season.values())[1]) + ] else: sub_table = obs_table[np.abs(obs_date - tobs) < time_max] diff --git a/pyproject.toml b/pyproject.toml index 25d8473..a1e092c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,6 @@ version_provider = "scm" update_changelog_on_bump = true changelog_incremental = true version_files = [ - "__version__.py", + "gammapy_tools/__version__.py", "pyproject.toml:version" ] From 2f808ff8683870a913b1df59b6872f599717278e Mon Sep 17 00:00:00 2001 From: Ste O'Brien Date: Thu, 9 May 2024 12:22:58 -0400 Subject: [PATCH 8/8] build(Docker-and-apptainer): updating container files updating apptainer file to use pip instead of mamba. 
modernizing dockerfile --- Dockerfile | 7 +++++-- docker-compose.yml | 4 ++-- docker_build.sh | 0 gammapy-tools.def | 29 +++++++++++++++++------------ 4 files changed, 24 insertions(+), 16 deletions(-) mode change 100644 => 100755 docker_build.sh diff --git a/Dockerfile b/Dockerfile index 1440947..273d43b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM jupyter/minimal-notebook AS base # Install gammapy -RUN mamba install gcc jupyterlab --yes +RUN mamba install gcc jupyterlab "gammapy==1.2" --yes WORKDIR /gammapy-tools @@ -28,14 +28,17 @@ COPY --from=base /opt/conda /opt/conda WORKDIR /gammapy-tools/tmp_build # RUN gammapy download datasets -ENV GAMMAPY_DATA=/gammapy-tools/gammapy-datasets/1.1/ +ENV GAMMAPY_DATA=/gammapy-tools/gammapy-datasets/1.2/ RUN mkdir -p $GAMMAPY_DATA +WORKDIR /gammapy-tools/ +RUN gammapy download datasets # Add package ADD --chown=1000:100 . /gammapy-tools/tmp_build/gammapy-tools WORKDIR /gammapy-tools/tmp_build/gammapy-tools + # RUN ls -lah RUN pip install . # RUN ./gammapy_tools/Hipparcos_MAG8_1997.dat $GAMMAPY_DATA/catalogs/ diff --git a/docker-compose.yml b/docker-compose.yml index 56e80ff..1716ad6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,5 +11,5 @@ services: - JUPYTER_TOKEN=letmein volumes: # Replace with where your data is located - - /path/to/data:/local_data - - $GAMMAPY_DATA:/gammapy-tools/gammapy-datasets/1.1/ \ No newline at end of file + - /home/obriens/DataAnalysis/Veritas/Gammapy:/local_data + # - $GAMMAPY_DATA:/gammapy-tools/gammapy-datasets/1.1/ \ No newline at end of file diff --git a/docker_build.sh b/docker_build.sh old mode 100644 new mode 100755 diff --git a/gammapy-tools.def b/gammapy-tools.def index 00ebfc9..b6e6e2a 100644 --- a/gammapy-tools.def +++ b/gammapy-tools.def @@ -16,22 +16,27 @@ Stage: build # Port for jupyter lab export JPORT=8000 export GAMMAPY_DATA=/gamma-tools/gammapy-datasets/1.2/ - . "/opt/conda/etc/profile.d/conda.sh" - . /opt/conda/bin/activate - export PATH=/opt/conda/bin/:$PATH - + #. "/opt/conda/etc/profile.d/conda.sh" + #. /opt/conda/bin/activate + #export PATH=/opt/conda/bin/:$PATH + . /opt/gammapy_tools/bin/activate + + %post apt update apt upgrade -y - apt install curl bash git gcc btop emacs -y + apt install curl bash git gcc btop emacs python3 python3-pip python3.12-venv -y # Install mamba and base env - curl -L -O "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" - bash Miniforge3-$(uname)-$(uname -m).sh -b -p /opt/conda - . /opt/conda/bin/activate - - mamba install -c conda-forge iminuit cmasher pip papermill matplotlib pip "jupyterlab==4.0.12" notebook ipykernel ipython ipywidgets --yes + #curl -L -O "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" + #bash Miniforge3-$(uname)-$(uname -m).sh -b -p /opt/conda + #. /opt/conda/bin/activate + + python3 -m venv /opt/gammapy_tools + . 
/opt/gammapy_tools/bin/activate + pip install iminuit cmasher pip papermill matplotlib pip "jupyterlab==4.0.12" notebook ipykernel ipython ipywidgets + #mamba install -c conda-forge iminuit cmasher pip papermill matplotlib pip "jupyterlab==4.0.12" notebook ipykernel ipython ipywidgets --yes # Install v2dl3 @@ -41,7 +46,7 @@ Stage: build # Because its an env install of requirements # Note the python version in the latest test throws issues with pytest - grep -A 100 "dependencies:" environment-eventdisplay.yml | grep "-" | grep -v "python" | awk '{print $2}' | xargs mamba install --yes + grep -A 100 "dependencies:" environment-eventdisplay.yml | grep "-" | grep -v "python" | awk '{print $2}' | xargs pip install # root_numpy throws issues too. Only VEGAS uses it mv setup.py _setup.py && grep -v "root_numpy" _setup.py > setup.py && pip install . @@ -59,7 +64,7 @@ Stage: build cd /gamma-tools ; rm -r /gamma-tools/tmp_build - mamba clean -a --yes + #mamba clean -a --yes python -m pip cache purge
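
Note on usage (not part of the patches above): the first patch threads an overwrite flag from prepare_dataset through to DataStore.copy_obs so that preparation can be re-run on an existing output directory. A minimal sketch of how a caller might exercise that flag follows; the config file name and the assumption that settings are loaded from YAML (as in make_background.py's __main__ block) are illustrative only, not taken from the patches.

    import yaml
    from gammapy_tools.make_background.prepare_data import prepare_dataset

    # Load the analysis configuration. YAML loading mirrors make_background.py's
    # __main__ block; "config.yaml" is a placeholder file name.
    with open("config.yaml", "r") as f:
        config = yaml.safe_load(f)

    # With overwrite=True, re-running preparation against an existing output
    # directory should succeed, because the flag is forwarded to
    # DataStore.copy_obs(obs_in_db, in_dir, overwrite=overwrite) as in patch 1.
    config = prepare_dataset(config, overwrite=True)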