AnalysisConfig

class gammapy.analysis.AnalysisConfig(
    *,
    general: gammapy.analysis.config.GeneralConfig = GeneralConfig(log=LogConfig(level='info', filename=None, filemode=None, format=None, datefmt=None), outdir='.', n_jobs=1, datasets_file=None, models_file=None),
    observations: gammapy.analysis.config.ObservationsConfig = ObservationsConfig(datastore=PosixPath('$GAMMAPY_DATA/hess-dl3-dr1'), obs_ids=[], obs_file=None, obs_cone=SpatialCircleConfig(frame=None, lon=None, lat=None, radius=None), obs_time=TimeRangeConfig(start=None, stop=None), required_irf=[<RequiredHDUEnum.aeff: 'aeff'>, <RequiredHDUEnum.edisp: 'edisp'>, <RequiredHDUEnum.psf: 'psf'>, <RequiredHDUEnum.bkg: 'bkg'>]),
    datasets: gammapy.analysis.config.DatasetsConfig = DatasetsConfig(type=<ReductionTypeEnum.spectrum: '1d'>, stack=True, geom=GeomConfig(wcs=WcsConfig(skydir=SkyCoordConfig(frame=None, lon=None, lat=None), binsize=<Angle 0.02 deg>, width=WidthConfig(width=<Angle 5. deg>, height=<Angle 5. deg>), binsize_irf=<Angle 0.2 deg>), selection=SelectionConfig(offset_max=<Angle 2.5 deg>), axes=EnergyAxesConfig(energy=EnergyAxisConfig(min=<Quantity 1. TeV>, max=<Quantity 10. TeV>, nbins=5), energy_true=EnergyAxisConfig(min=<Quantity 0.5 TeV>, max=<Quantity 20. TeV>, nbins=16))), map_selection=[<MapSelectionEnum.counts: 'counts'>, <MapSelectionEnum.exposure: 'exposure'>, <MapSelectionEnum.background: 'background'>, <MapSelectionEnum.psf: 'psf'>, <MapSelectionEnum.edisp: 'edisp'>], background=BackgroundConfig(method=None, exclusion=None, parameters={}), safe_mask=SafeMaskConfig(methods=[<SafeMaskMethodsEnum.aeff_default: 'aeff-default'>], parameters={}), on_region=SpatialCircleConfig(frame=None, lon=None, lat=None, radius=None), containment_correction=True),
    fit: gammapy.analysis.config.FitConfig = FitConfig(fit_range=EnergyRangeConfig(min=None, max=None)),
    flux_points: gammapy.analysis.config.FluxPointsConfig = FluxPointsConfig(energy=EnergyAxisConfig(min=None, max=None, nbins=None), source='source', parameters={'selection_optional': 'all'}),
    excess_map: gammapy.analysis.config.ExcessMapConfig = ExcessMapConfig(correlation_radius=<Angle 0.1 deg>, parameters={}, energy_edges=EnergyAxisConfig(min=None, max=None, nbins=None)),
    light_curve: gammapy.analysis.config.LightCurveConfig = LightCurveConfig(time_intervals=TimeRangeConfig(start=None, stop=None), energy_edges=EnergyAxisConfig(min=None, max=None, nbins=None), source='source', parameters={'selection_optional': 'all'}),
)[source]

Bases: gammapy.analysis.config.GammapyBaseConfig

Gammapy analysis configuration.

Create a new model by parsing and validating input data from keyword arguments.

Raises ValidationError if the input data cannot be parsed to form a valid model.
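
For example, a configuration with all default settings can be created with no arguments; the nested groups are then available as attributes (the values below are the defaults shown in the signature above):

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig()
>>> config.general.log.level
'info'
>>> config.datasets.stack
True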

Methods Summary

construct([_fields_set])

Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.

copy(*[, include, exclude, update, deep])

Duplicate a model, optionally choosing which fields to include, exclude, and change.

dict(*[, include, exclude, by_alias, ...])

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

from_orm(obj)

from_yaml(config_str)

Create from YAML string.

json(*[, include, exclude, by_alias, ...])

Generate a JSON representation of the model; the include and exclude arguments behave as for dict().

parse_file(path, *[, content_type, ...])

parse_obj(obj)

parse_raw(b, *[, content_type, encoding, ...])

read(path)

Read from a YAML file.

schema([by_alias, ref_template])

schema_json(*[, by_alias, ref_template])

set_logging()

Set logging config.

to_yaml()

Convert to YAML string.

update([config])

Update config with provided settings.

update_forward_refs(**localns)

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate(value)

write(path[, overwrite])

Write to YAML file.

Methods Documentation

classmethod construct(_fields_set: Optional[SetStr] = None, **values: Any) → Model

Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' were set, since it adds all passed values.
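
A minimal sketch of this behaviour: since construct skips validation, arbitrary values are stored as-is, so it should only be given trusted data.

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig.construct(general="not validated")  # no validation is performed
>>> config.general
'not validated'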

copy(*, include: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, exclude: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, update: Optional[DictStrAny] = None, deep: bool = False) → Model

Duplicate a model, optionally choosing which fields to include, exclude, and change.

Parameters
  • include – fields to include in new model

  • exclude – fields to exclude from the new model; as with values, this takes precedence over include

  • update – values to change or add in the new model. Note that the data is not validated before creating the new model: you should trust this data

  • deep – set to True to make a deep copy of the model

Returns

new model instance
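
For instance (a minimal sketch; with deep=True the nested section models are duplicated as well):

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig()
>>> clone = config.copy(deep=True)
>>> clone is config
False
>>> clone.general.outdir
'.'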

dict(*, include: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, exclude: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False) → DictStrAny

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
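
A short illustration; nested section models are converted to plain dictionaries recursively:

>>> from gammapy.analysis import AnalysisConfig
>>> d = AnalysisConfig().dict()
>>> d["general"]["n_jobs"]
1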

classmethod from_orm(obj: Any) → Model

classmethod from_yaml(config_str)[source]

Create from YAML string.
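
Only the settings present in the string need to be given; fields that are omitted keep their defaults (a minimal sketch):

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig.from_yaml("""
... general:
...     log:
...         level: warning
... """)
>>> config.general.log.level
'warning'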

json(*, include: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, exclude: Optional[Union[AbstractSetIntStr, MappingIntStrAny]] = None, by_alias: bool = False, skip_defaults: Optional[bool] = None, exclude_unset: bool = False, exclude_defaults: bool = False, exclude_none: bool = False, encoder: Optional[Callable[[Any], Any]] = None, models_as_dict: bool = True, **dumps_kwargs: Any) → str

Generate a JSON representation of the model; the include and exclude arguments behave as for dict().

encoder is an optional function to supply as default to json.dumps(); other arguments are passed through to json.dumps().
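
A hedged sketch (this assumes the configured JSON encoders handle the astropy Angle and Quantity defaults, so that serialization succeeds):

>>> from gammapy.analysis import AnalysisConfig
>>> text = AnalysisConfig().json()
>>> isinstance(text, str)
True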

classmethod parse_file(path: Union[str, pathlib.Path], *, content_type: str = None, encoding: str = 'utf8', proto: pydantic.parse.Protocol = None, allow_pickle: bool = False) → Model

classmethod parse_obj(obj: Any) → Model

classmethod parse_raw(b: Union[str, bytes], *, content_type: str = None, encoding: str = 'utf8', proto: pydantic.parse.Protocol = None, allow_pickle: bool = False) → Model
classmethod read(path)[source]

Read from a YAML file.
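
A round trip through a file might look like this (the file name is illustrative):

>>> from gammapy.analysis import AnalysisConfig
>>> AnalysisConfig().write("my-analysis.yaml", overwrite=True)
>>> config = AnalysisConfig.read("my-analysis.yaml")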

classmethod schema(by_alias: bool = True, ref_template: str = '#/definitions/{model}') → DictStrAny

classmethod schema_json(*, by_alias: bool = True, ref_template: str = '#/definitions/{model}', **dumps_kwargs: Any) → str
set_logging()[source]

Set logging config.

Calls logging.basicConfig, i.e. adjusts global logging state.
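
For example, lowering the level before running an analysis (a minimal sketch; note this reconfigures the root logger for the whole process):

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig()
>>> config.general.log.level = "debug"
>>> config.set_logging()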

to_yaml()[source]

Convert to YAML string.
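
For instance:

>>> from gammapy.analysis import AnalysisConfig
>>> yaml_str = AnalysisConfig().to_yaml()
>>> "general:" in yaml_str
True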

update(config=None)[source]

Update config with provided settings.

Parameters

config : str or AnalysisConfig object
    Configuration settings provided in dict() syntax.
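
A sketch, assuming settings passed as a string are parsed as YAML in dict() syntax and that the merged configuration is returned as a new object:

>>> from gammapy.analysis import AnalysisConfig
>>> config = AnalysisConfig()
>>> updated = config.update("general: {log: {level: warning}}")
>>> updated.general.log.level
'warning'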

classmethod update_forward_refs(**localns: Any) → None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

classmethod validate(value: Any) → Model

write(path, overwrite=False)[source]

Write to YAML file.
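
For example (the target file name is illustrative; overwrite=True replaces an existing file):

>>> from gammapy.analysis import AnalysisConfig
>>> AnalysisConfig().write("analysis-config.yaml", overwrite=True)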