# Licensed under a 3-clause BSD style license - see LICENSE.rst
import html
import itertools
import logging

import numpy as np
from astropy.table import Table

from gammapy.utils.pbar import progress_bar

from .covariance import Covariance
from .iminuit import (
    confidence_iminuit,
    contour_iminuit,
    covariance_iminuit,
    optimize_iminuit,
)
from .scipy import confidence_scipy, optimize_scipy
from .sherpa import optimize_sherpa

__all__ = ["Fit"]

log = logging.getLogger(__name__)


class Registry:
    """Registry of available backends for given tasks.

    Gives users the power to extend from their scripts.
    Used by `Fit` below.

    Not sure if we should call it "backend" or "method" or something else.
    Probably we will code up some methods, e.g. for profile analysis ourselves,
    using scipy or even just Python / Numpy?
    """

    # Mapping of task name -> {backend name -> backend callable}.
    register = {
        "optimize": {
            "minuit": optimize_iminuit,
            "sherpa": optimize_sherpa,
            "scipy": optimize_scipy,
        },
        "covariance": {
            "minuit": covariance_iminuit,
            # "sherpa": covariance_sherpa,
            # "scipy": covariance_scipy,
        },
        "confidence": {
            "minuit": confidence_iminuit,
            # "sherpa": confidence_sherpa,
            "scipy": confidence_scipy,
        },
    }

    @classmethod
    def get(cls, task, backend):
        """Return the registered callable for ``task`` and ``backend``.

        Raises `ValueError` for an unknown task or an unknown backend
        for that task.
        """
        backend_options = cls.register.get(task)
        if backend_options is None:
            raise ValueError(f"Unknown task {task!r}")

        compute = backend_options.get(backend)
        if compute is None:
            raise ValueError(f"Unknown backend {backend!r} for task {task!r}")

        return compute


registry = Registry()
class Fit:
    """Fit class.

    The fit class provides a uniform interface to multiple fitting backends.
    Currently available: "minuit", "sherpa" and "scipy".

    Parameters
    ----------
    backend : {"minuit", "scipy", "sherpa"}
        Global backend used for fitting. Default is "minuit".
    optimize_opts : dict
        Keyword arguments passed to the optimizer. For the `"minuit"` backend
        see https://iminuit.readthedocs.io/en/stable/reference.html#iminuit.Minuit
        for a detailed description of the available options. If there is an entry
        'migrad_opts', those options will be passed to `iminuit.Minuit.migrad()`.

        For the `"sherpa"` backend you can choose from the options:

            * `"simplex"`
            * `"levmar"`
            * `"moncar"`
            * `"gridsearch"`

        Those methods are described and compared in detail on
        http://cxc.cfa.harvard.edu/sherpa/methods/index.html. The available
        options of the optimization methods are described on the following
        pages in detail:

            * http://cxc.cfa.harvard.edu/sherpa/ahelp/neldermead.html
            * http://cxc.cfa.harvard.edu/sherpa/ahelp/montecarlo.html
            * http://cxc.cfa.harvard.edu/sherpa/ahelp/gridsearch.html
            * http://cxc.cfa.harvard.edu/sherpa/ahelp/levmar.html

        For the `"scipy"` backend the available options are described in detail here:
        https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html
    covariance_opts : dict
        Covariance options passed to the given backend.
    confidence_opts : dict
        Extra arguments passed to the backend. E.g. `iminuit.Minuit.minos` supports
        a ``maxcall`` option. For the scipy backend ``confidence_opts`` are forwarded
        to `~scipy.optimize.brentq`. If the confidence estimation fails, the
        bracketing interval can be adapted by modifying the upper bound of the
        interval (``b``) value.
    store_trace : bool
        Whether to store the trace of the fit.
    """

    def __init__(
        self,
        backend="minuit",
        optimize_opts=None,
        covariance_opts=None,
        confidence_opts=None,
        store_trace=False,
    ):
        self.store_trace = store_trace
        self.backend = backend

        # Each per-task option dict defaults to using the global backend.
        if optimize_opts is None:
            optimize_opts = {"backend": backend}

        if covariance_opts is None:
            covariance_opts = {"backend": backend}

        if confidence_opts is None:
            confidence_opts = {"backend": backend}

        self.optimize_opts = optimize_opts
        self.covariance_opts = covariance_opts
        self.confidence_opts = confidence_opts
        # Populated by `optimize` when the "minuit" backend is used.
        self._minuit = None

    def _repr_html_(self):
        """HTML representation for notebooks; falls back to escaped `str`."""
        try:
            return self.to_html()
        except AttributeError:
            return f"<pre>{html.escape(str(self))}</pre>"

    @staticmethod
    def _parse_datasets(datasets):
        """Normalize input to a `Datasets` object; return it with its parameters."""
        from gammapy.datasets import Dataset, Datasets

        if isinstance(datasets, (list, Dataset)):
            datasets = Datasets(datasets)
        return datasets, datasets.parameters
[docs]defrun(self,datasets):"""Run all fitting steps. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. Returns ------- fit_result : `FitResult` Fit result. """optimize_result=self.optimize(datasets=datasets)ifself.backendnotinregistry.register["covariance"]:log.warning("No covariance estimate - not supported by this backend.")returnFitResult(optimize_result=optimize_result)covariance_result=self.covariance(datasets=datasets,optimize_result=optimize_result)optimize_result.models.covariance=Covariance(optimize_result.models.parameters,covariance_result.matrix)returnFitResult(optimize_result=optimize_result,covariance_result=covariance_result,)
    def optimize(self, datasets):
        """Run the optimization.

        Parameters
        ----------
        datasets : `Datasets` or list of `Dataset`
            Datasets to optimize.

        Returns
        -------
        optimize_result : `OptimizeResult`
            Optimization result.
        """
        datasets, parameters = self._parse_datasets(datasets=datasets)
        datasets.parameters.check_limits()

        if len(parameters.free_parameters.names) == 0:
            raise ValueError("No free parameters for fitting")

        parameters.autoscale()

        # Local option dict; "backend" is popped so only optimizer options remain.
        kwargs = self.optimize_opts.copy()
        backend = kwargs.pop("backend", self.backend)

        compute = registry.get("optimize", backend)
        # TODO: change this calling interface!
        # probably should pass a fit statistic, which has a model, which has parameters
        # and return something simpler, not a tuple of three things
        factors, info, optimizer = compute(
            parameters=parameters,
            function=datasets.stat_sum,
            store_trace=self.store_trace,
            **kwargs,
        )

        if backend == "minuit":
            # Keep the Minuit object for later covariance/confidence steps,
            # and record the method name reported in the result below.
            self._minuit = optimizer
            kwargs["method"] = "migrad"

        trace = Table(info.pop("trace"))

        if self.store_trace:
            # Relabel the trace columns (skipping the first, e.g. the stat value)
            # with the unique parameter names of the free parameters.
            idx = [
                parameters.index(par)
                for par in parameters.unique_parameters.free_parameters
            ]
            unique_names = np.array(datasets.models.parameters_unique_names)[idx]
            trace.rename_columns(trace.colnames[1:], list(unique_names))

        # Copy final results into the parameters object
        parameters.set_parameter_factors(factors)
        parameters.check_limits()

        return OptimizeResult(
            models=datasets.models.copy(),
            total_stat=datasets.stat_sum(),
            backend=backend,
            method=kwargs.get("method", backend),
            trace=trace,
            minuit=optimizer,
            **info,
        )
[docs]defcovariance(self,datasets,optimize_result=None):"""Estimate the covariance matrix. Assumes that the model parameters are already optimised. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. optimize_result : `OptimizeResult`, optional Optimization result. Can be optionally used to pass the state of the IMinuit object to the covariance estimation. This might save computation time in certain cases. Default is None. Returns ------- result : `CovarianceResult` Results. """datasets,unique_pars=self._parse_datasets(datasets=datasets)parameters=datasets.models.parameterskwargs=self.covariance_opts.copy()ifoptimize_resultisnotNoneandoptimize_result.backend=="minuit":kwargs["minuit"]=optimize_result.minuitbackend=kwargs.pop("backend",self.backend)compute=registry.get("covariance",backend)withunique_pars.restore_status():ifself.backend=="minuit":method="hesse"else:method=""factor_matrix,info=compute(parameters=unique_pars,function=datasets.stat_sum,**kwargs)matrix=Covariance.from_factor_matrix(parameters=parameters,matrix=factor_matrix)datasets.models.covariance=matrixifoptimize_result:optimize_result.models.covariance=matrix.data.copy()returnCovarianceResult(backend=backend,method=method,success=info["success"],message=info["message"],matrix=optimize_result.models.covariance.data,)
[docs]defconfidence(self,datasets,parameter,sigma=1,reoptimize=True):"""Estimate confidence interval. Extra ``kwargs`` are passed to the backend. E.g. `iminuit.Minuit.minos` supports a ``maxcall`` option. For the scipy backend ``kwargs`` are forwarded to `~scipy.optimize.brentq`. If the confidence estimation fails, the bracketing interval can be adapted by modifying the the upper bound of the interval (``b``) value. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. parameter : `~gammapy.modeling.Parameter` Parameter of interest. sigma : float, optional Number of standard deviations for the confidence level. Default is 1. reoptimize : bool, optional Re-optimize other parameters, when computing the confidence region. Default is True. Returns ------- result : dict Dictionary with keys "errp", 'errn", "success" and "nfev". """datasets,parameters=self._parse_datasets(datasets=datasets)kwargs=self.confidence_opts.copy()backend=kwargs.pop("backend",self.backend)compute=registry.get("confidence",backend)parameter=parameters[parameter]withparameters.restore_status():result=compute(parameters=parameters,parameter=parameter,function=datasets.stat_sum,sigma=sigma,reoptimize=reoptimize,**kwargs,)result["errp"]*=parameter.scaleresult["errn"]*=parameter.scalereturnresult
[docs]defstat_profile(self,datasets,parameter,reoptimize=False):"""Compute fit statistic profile. The method used is to vary one parameter, keeping all others fixed. So this is taking a "slice" or "scan" of the fit statistic. Notes ----- The progress bar can be displayed for this function. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. parameter : `~gammapy.modeling.Parameter` Parameter of interest. The specification for the scan, such as bounds and number of values is taken from the parameter object. reoptimize : bool, optional Re-optimize other parameters, when computing the confidence region. Default is False. Returns ------- results : dict Dictionary with keys "parameter_name_scan", "stat_scan" and "fit_results". The latter contains an empty list, if `reoptimize` is set to False. """datasets,parameters=self._parse_datasets(datasets=datasets)parameter=parameters[parameter]values=parameter.scan_valuesstats=[]fit_results=[]withparameters.restore_status():forvalueinprogress_bar(values,desc="Scan values"):parameter.value=valueifreoptimize:parameter.frozen=Trueresult=self.optimize(datasets=datasets)stat=result.total_statfit_results.append(result)else:stat=datasets.stat_sum()stats.append(stat)idx=datasets.parameters.index(parameter)name=datasets.models.parameters_unique_names[idx]return{f"{name}_scan":values,"stat_scan":np.array(stats),"fit_results":fit_results,}
[docs]defstat_surface(self,datasets,x,y,reoptimize=False):"""Compute fit statistic surface. The method used is to vary two parameters, keeping all others fixed. So this is taking a "slice" or "scan" of the fit statistic. Caveat: This method can be very computationally intensive and slow See also: `Fit.stat_contour`. Notes ----- The progress bar can be displayed for this function. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. x, y : `~gammapy.modeling.Parameter` Parameters of interest. reoptimize : bool, optional Re-optimize other parameters, when computing the confidence region. Default is False. Returns ------- results : dict Dictionary with keys "x_values", "y_values", "stat" and "fit_results". The latter contains an empty list, if `reoptimize` is set to False. """datasets,parameters=self._parse_datasets(datasets=datasets)x=parameters[x]y=parameters[y]stats=[]fit_results=[]withparameters.restore_status():forx_value,y_valueinprogress_bar(itertools.product(x.scan_values,y.scan_values),desc="Trial values"):x.value,y.value=x_value,y_valueifreoptimize:x.frozen,y.frozen=True,Trueresult=self.optimize(datasets=datasets)stat=result.total_statfit_results.append(result)else:stat=datasets.stat_sum()stats.append(stat)shape=(len(x.scan_values),len(y.scan_values))stats=np.array(stats).reshape(shape)ifreoptimize:fit_results=np.array(fit_results).reshape(shape)i1,i2=datasets.parameters.index(x),datasets.parameters.index(y)name_x=datasets.models.parameters_unique_names[i1]name_y=datasets.models.parameters_unique_names[i2]return{f"{name_x}_scan":x.scan_values,f"{name_y}_scan":y.scan_values,"stat_scan":stats,"fit_results":fit_results,}
[docs]defstat_contour(self,datasets,x,y,numpoints=10,sigma=1):"""Compute stat contour. Calls ``iminuit.Minuit.mncontour``. This is a contouring algorithm for a 2D function which is not simply the fit statistic function. That 2D function is given at each point ``(par_1, par_2)`` by re-optimising all other free parameters, and taking the fit statistic at that point. Very compute-intensive and slow. Parameters ---------- datasets : `Datasets` or list of `Dataset` Datasets to optimize. x, y : `~gammapy.modeling.Parameter` Parameters of interest. numpoints : int, optional Number of contour points. Default is 10. sigma : float, optional Number of standard deviations for the confidence level. Default is 1. Returns ------- result : dict Dictionary containing the parameter values defining the contour, with the boolean flag "success" and the information objects from ``mncontour``. """datasets,parameters=self._parse_datasets(datasets=datasets)x=parameters[x]y=parameters[y]i1,i2=datasets.parameters.index(x),datasets.parameters.index(y)name_x=datasets.models.parameters_unique_names[i1]name_y=datasets.models.parameters_unique_names[i2]withparameters.restore_status():result=contour_iminuit(parameters=parameters,function=datasets.stat_sum,x=x,y=y,numpoints=numpoints,sigma=sigma,)x=result["x"]*x.scaley=result["y"]*y.scalereturn{name_x:x,name_y:y,"success":result["success"],}
class FitStepResult:
    """Fit result base class.

    Holds the backend/method identification plus the success flag and
    status message common to all fit steps.
    """

    def __init__(self, backend, method, success, message):
        self._success = success
        self._message = message
        self._backend = backend
        self._method = method

    @property
    def backend(self):
        """Optimizer backend used for the fit."""
        return self._backend

    @property
    def method(self):
        """Optimizer method used for the fit."""
        return self._method

    @property
    def success(self):
        """Fit success status flag."""
        return self._success

    @property
    def message(self):
        """Optimizer status message."""
        return self._message

    def __str__(self):
        return (
            f"{self.__class__.__name__}\n\n"
            f"\tbackend    : {self.backend}\n"
            f"\tmethod     : {self.method}\n"
            f"\tsuccess    : {self.success}\n"
            f"\tmessage    : {self.message}\n"
        )

    def _repr_html_(self):
        """HTML representation for notebooks; falls back to escaped `str`."""
        try:
            return self.to_html()
        except AttributeError:
            return f"<pre>{html.escape(str(self))}</pre>"


class CovarianceResult(FitStepResult):
    """Covariance result object."""

    def __init__(self, matrix=None, **kwargs):
        self._matrix = matrix
        super().__init__(**kwargs)

    @property
    def matrix(self):
        """Covariance matrix as a `~numpy.ndarray`."""
        return self._matrix


class OptimizeResult(FitStepResult):
    """Optimize result object."""

    def __init__(self, models, nfev, total_stat, trace, minuit=None, **kwargs):
        self._models = models
        self._nfev = nfev
        self._total_stat = total_stat
        self._trace = trace
        self._minuit = minuit
        super().__init__(**kwargs)

    @property
    def minuit(self):
        """Minuit object."""
        return self._minuit

    @property
    def parameters(self):
        """Best fit parameters."""
        return self.models.parameters

    @property
    def models(self):
        """Best fit models."""
        return self._models

    @property
    def trace(self):
        """Parameter trace from the optimisation."""
        return self._trace

    @property
    def nfev(self):
        """Number of function evaluations."""
        return self._nfev

    @property
    def total_stat(self):
        """Value of the fit statistic at minimum."""
        return self._total_stat

    def __str__(self):
        string = super().__str__()
        string += f"\tnfev       : {self.nfev}\n"
        string += f"\ttotal stat : {self.total_stat:.2f}\n\n"
        return string


class FitResult:
    """Fit result class.

    Parameters
    ----------
    optimize_result : `OptimizeResult`
        Result of the optimization step.
    covariance_result : `CovarianceResult`
        Result of the covariance step.
    """

    def __init__(self, optimize_result=None, covariance_result=None):
        self._optimize_result = optimize_result
        self._covariance_result = covariance_result

    @property
    def minuit(self):
        """Minuit object."""
        return self.optimize_result.minuit

    @property
    def parameters(self):
        """Best fit parameters of the optimization step."""
        return self.optimize_result.parameters

    @property
    def models(self):
        """Best fit models of the optimization step."""
        return self.optimize_result.models

    @property
    def total_stat(self):
        """Total stat of the optimization step."""
        return self.optimize_result.total_stat

    @property
    def trace(self):
        """Parameter trace of the optimisation step."""
        return self.optimize_result.trace

    @property
    def nfev(self):
        """Number of function evaluations of the optimisation step."""
        return self.optimize_result.nfev

    @property
    def backend(self):
        """Optimizer backend used for the fit."""
        return self.optimize_result.backend

    @property
    def method(self):
        """Optimizer method used for the fit."""
        return self.optimize_result.method

    @property
    def message(self):
        """Optimizer status message."""
        return self.optimize_result.message

    @property
    def success(self):
        """Total success flag."""
        success = self.optimize_result.success

        # The fit only counts as successful if the covariance step
        # (when present) succeeded as well.
        if self.covariance_result:
            success &= self.covariance_result.success

        return success

    @property
    def optimize_result(self):
        """Optimize result."""
        return self._optimize_result

    @property
    def covariance_result(self):
        """Covariance result."""
        return self._covariance_result

    def __str__(self):
        string = ""
        if self.optimize_result:
            string += str(self.optimize_result)

        if self.covariance_result:
            string += str(self.covariance_result)

        return string

    def _repr_html_(self):
        """HTML representation for notebooks; falls back to escaped `str`."""
        try:
            return self.to_html()
        except AttributeError:
            return f"<pre>{html.escape(str(self))}</pre>"