From a0c192e46f451e88ff8a4d81ee43ab76c440f58e Mon Sep 17 00:00:00 2001
From: Janos Gabler
Date: Thu, 22 Aug 2024 17:09:17 +0200
Subject: [PATCH] Polish changelog for release 0.5.0 (#525)

---
 CHANGES.md                                     | 77 ++++++++++--
 README.md                                      | 18 ++-
 .../source/explanation/internal_optimizers.md  | 112 +-----------------
 pyproject.toml                                 | 13 +-
 4 files changed, 92 insertions(+), 128 deletions(-)

diff --git a/CHANGES.md b/CHANGES.md
index 184c3dc88..7ac3de308 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -10,22 +10,50 @@ we drop the official support for Python 3.9.
 
 ## 0.5.0
 
-This is a major release with several breaking changes and deprecations. On a high level,
-the major changes are:
+This is a major release with several breaking changes and deprecations. In this
+release we started implementing two major enhancement proposals and renamed the package
+from estimagic to optimagic (while keeping the `estimagic` namespace for the estimation
+capabilities).
 
-- Implement EP-02: Static typing
-- Implement EP-03: Alignment with SciPy
-- Rename the package from `estimagic` to `optimagic` (while keeping the `estimagic`
-  namespace for the estimation capabilities).
+- [EP-02: Static typing](https://estimagic.org/en/latest/development/ep-02-typing.html)
+- [EP-03: Alignment with SciPy](https://estimagic.org/en/latest/development/ep-03-alignment.html)
+
+The implementation of the two enhancement proposals is not complete and will likely
+take until version `0.6.0`. However, all breaking changes and deprecations (with the
+exception of a minor change in benchmarking) are already implemented, such that updating
+to version `0.5.0` is future-proof.
 
 - {gh}`500` removes the dashboard, the support for simopt optimizers and the
   `derivative_plot` ({ghuser}`janosg`)
+- {gh}`502` renames estimagic to optimagic ({ghuser}`janosg`)
 - {gh}`504` aligns `maximize` and `minimize` more closely with scipy. All related
   deprecations and breaking changes are listed below. As a result, scipy code that
   uses minimize with the arguments `x0`, `fun`, `jac` and `method` will run without
   changes in optimagic. Similarly, the `OptimizeResult` gets some aliases so it behaves
   more like SciPy's.
+- {gh}`506` introduces the new `Bounds` object and deprecates `lower_bounds`,
+  `upper_bounds`, `soft_lower_bounds` and `soft_upper_bounds` ({ghuser}`janosg`)
+- {gh}`507` updates the infrastructure so we can make parallel releases under the names
+  `optimagic` and `estimagic` ({ghuser}`timmens`)
+- {gh}`508` introduces the new `ScalingOptions` object and deprecates the
+  `scaling_options` argument of `maximize` and `minimize` ({ghuser}`timmens`)
+- {gh}`512` implements the new interface for objective functions and derivatives
+  ({ghuser}`janosg`)
+- {gh}`513` implements the new `optimagic.MultistartOptions` object and deprecates the
+  `multistart_options` argument of `maximize` and `minimize` ({ghuser}`timmens`)
+- {gh}`514` and {gh}`516` introduce the `NumdiffResult` object that is returned from
+  `first_derivative` and `second_derivative`. They also fix several bugs in the
+  pytree handling in `first_derivative` and `second_derivative` and deprecate
+  Richardson Extrapolation and the `key` argument ({ghuser}`timmens`)
+- {gh}`517` introduces the new `NumdiffOptions` object for configuring numerical
+  differentiation during optimization or estimation ({ghuser}`timmens`)
+- {gh}`519` rewrites the logging code and introduces new `LogOptions` objects
+  ({ghuser}`schroedk`)
+- {gh}`521` introduces the new internal algorithm interface
+  ({ghuser}`janosg` and {ghuser}`mpetrosian`)
+- {gh}`522` introduces the new `Constraint` objects and deprecates passing
+  dictionaries or lists of dictionaries as constraints ({ghuser}`timmens`)
+
 
 ### Breaking changes
 
@@ -33,8 +61,20 @@
   `maximize` and `minimize` and the file already exists, the default behavior is to
   raise an error now. Replacement or extension of an existing file must be explicitly
   configured.
-- The argument `if_table_exists` has no effect anymore and a
+- The argument `if_table_exists` in `log_options` has no effect anymore and a
   corresponding warning is raised.
+- `OptimizeResult.history` is now an `optimagic.History` object instead of a
+  dictionary. Dictionary style access is implemented but deprecated. Other dictionary
+  methods might not work.
+- The result of `first_derivative` and `second_derivative` is now an
+  `optimagic.NumdiffResult` object instead of a dictionary. Dictionary style access is
+  implemented but other dictionary methods might not work.
+- The dashboard is removed.
+- The `derivative_plot` is removed.
+- Optimizers from Simopt are removed.
+- Passing callables with the old internal algorithm interface as `algorithm` to
+  `minimize` and `maximize` is not supported anymore. Use the new
+  `Algorithm` objects instead. For examples see: https://tinyurl.com/24a5cner
 
 ### Deprecations
 
@@ -62,10 +102,29 @@
 - `convergence_scaled_gradient_tolerance` -> `convergence_gtol_scaled`
 - `stopping_max_criterion_evaluations` -> `stopping_maxfun`
 - `stopping_max_iterations` -> `stopping_maxiter`
-- The `log_options` argument of `minimize` and `maximize` is deprecated,
-  an according FutureWarning is raised.
+- The arguments `lower_bounds`, `upper_bounds`, `soft_lower_bounds` and
+  `soft_upper_bounds` are deprecated and replaced by `optimagic.Bounds`. This affects
+  `maximize`, `minimize`, `estimate_ml`, `estimate_msm`, `slice_plot` and several
+  other functions.
+- The `log_options` argument of `minimize` and `maximize` is deprecated. Instead,
+  `LogOptions` objects can be passed under the `logging` argument.
 - The class `OptimizeLogReader` is deprecated and redirects to `SQLiteLogReader`.
+- The `scaling_options` argument of `maximize` and `minimize` is deprecated. Instead, a
+  `ScalingOptions` object can be passed under the `scaling` argument, which was
+  previously just a bool.
+- Objective functions that return a dictionary with the special keys "value",
+  "contributions" and "root_contributions" are deprecated. Instead, likelihood and
+  least-squares functions are marked with a `mark.likelihood` or `mark.least_squares`
+  decorator. There is a detailed how-to guide that shows the new behavior. This affects
+  `maximize`, `minimize`, `slice_plot` and other functions that work with objective
+  functions.
+- The `multistart_options` argument of `minimize` and `maximize` is deprecated. Instead,
+  a `MultistartOptions` object can be passed under the `multistart` argument.
+- Richardson Extrapolation is deprecated in `first_derivative` and `second_derivative`.
+- The `key` argument is deprecated in `first_derivative` and `second_derivative`.
+- Passing dictionaries or lists of dictionaries as `constraints` to `maximize` or
+  `minimize` is deprecated. Use the new `Constraint` objects instead.
 
 ## 0.4.7
 
diff --git a/README.md b/README.md
index af35ebef2..26d037c50 100644
--- a/README.md
+++ b/README.md
@@ -28,10 +28,20 @@ tools, parallel numerical derivatives and more.
   perform statistical inference on estimated parameters.
 
 *estimagic* is now a subpackage of *optimagic*.
 
+## Documentation
+
+The documentation is hosted at https://optimagic.readthedocs.io
+
 ## Installation
 
-The package can be installed via conda. To do so, type the following commands in a
-terminal:
+The package can be installed via pip or conda. To do so, type the following commands in
+a terminal:
+
+```bash
+pip install optimagic
+```
+
+or
 
 ```bash
 $ conda config --add channels conda-forge
@@ -67,10 +77,6 @@ To enable all algorithms at once, do the following:
 
 `pip install fides>=0.7.4 (Make sure you have at least 0.7.1)`
 
-## Documentation
-
-The documentation is hosted ([on rtd](https://estimagic.readthedocs.io/en/latest/#))
-
 ## Citation
 
 If you use optimagic for your research, please do not forget to cite it.
 
diff --git a/docs/source/explanation/internal_optimizers.md b/docs/source/explanation/internal_optimizers.md
index 3ee33f806..bb004e104 100644
--- a/docs/source/explanation/internal_optimizers.md
+++ b/docs/source/explanation/internal_optimizers.md
@@ -23,49 +23,10 @@ transformed problem.
 
 ## The internal optimizer interface
 
-An internal optimizer is a a function that minimizes a criterion function and fulfills a
-few conditions. In our experience, it is not hard to wrap any optimizer into this
-interface. The mandatory conditions for an internal optimizer function are:
-
-1. It is decorated with the `mark_minimizer` decorator and thus carries information that
-   tells optimagic how to use the internal optimizer.
-
-1. It uses the standard names for the arguments that describe the optimization problem:
-
-   - criterion: for the criterion function
-   - x: for the start parameters in form of a 1d numpy array
-   - derivative: for the first derivative of the criterion function
-   - criterion_and_derivative: for a function that evaluates the criterion and its first
-     derivative jointly
-   - lower_bounds: for lower bounds in form of a 1d numpy array
-   - upper_bounds: for upper bounds in form of a 1d numpy array
-   - nonlinear_constraints: for nonlinear constraints in form a list of dictionaries
-
-   Of course, algorithms that do not need a certain argument (e.g. unbounded or
-   derivative free ones) do not need those arguments at all.
-
-1. All other arguments have default values.
-
-(internal-optimizer-output)=
+(to be written)
 
 ## Output of internal optimizers
 
-After convergence or when another stopping criterion is achieved the internal optimizer
-should return a dictionary with the following entries:
-
-- solution_x: The best parameter achieved so far
-- solution_criterion: The value of the criterion at solution_x. This can be a scalar or
-  dictionary.
-- n_fun_evals: The number of criterion evaluations.
-- n_jac_evals: The number of derivative evaluations.
-- n_iterations: The number of iterations
-- success: True if convergence was achieved
-- message: A string with additional information.
-
-If some of the entries are missing, they will automatically be filled with `None` and no
-errors are raised. Nevertheless, you should try to return as much information as
-possible.
-
 (naming-conventions)=
 
 ## Naming conventions for algorithm specific arguments
@@ -89,75 +50,8 @@ the exact meaning of all options for all optimizers.
 
 ## Algorithms that parallelize
 
-Algorithms can evaluate the criterion function in parallel. To make such a parallel
-algorithm fully compatible with optimagic (including history collection and benchmarking
-functionality), the following conditions need to be fulfilled:
-
-- The algorithm has an argument called `n_cores` which determines how many cores are
-  used for the parallelization.
-- The algorithm has an argument called `batch_evaluator` and all parallelization is done
-  using a built-in or user provided batch evaluator.
-
-Moreover, we strongly suggest to comply with the following convention:
-
-- The algorithm has an argument called `batch_size` which is an integer that is greater
-  or equal to `n_cores`. Setting the `batch_size` larger than n_cores, allows to
-  simulate how the algorithm would behave with `n_cores=batch_size` but only uses
-  `n_cores` cores. This allows to simulate / benchmark the parallelizability of an
-  algorithm even if no parallel hardware is available.
-
-If the mandatory conditions are not fulfilled, the algorithm should disable all history
-collection by using `mark_minimizer(..., disable_history=True)`.
+(to be written)
 
 ## Nonlinear constraints
 
-optimagic can pass nonlinear constraints to the internal optimizer. The internal
-interface for nonlinear constraints is as follows.
-
-A nonlinear constraint is a `list` of `dict` 's, where each `dict` represents a group of
-constraints. In each group the constraint function can potentially be multi-dimensional.
-We distinguish between equality and inequality constraints, which is signalled by a dict
-entry `type` that takes values `"eq"` and `"ineq"`. The constraint function, which takes
-as input an internal parameter vector, is stored under the entry `fun`, while the
-Jacobian of that function is stored at `jac`. The tolerance for the constraints is
-stored under `tol`. At last, the number of constraints in each group is specified under
-`n_constr`. An example list with one constraint that would be passed to the internal
-optimizer is given by
-
-```
-constraints = [
-    {
-        "type": "ineq",
-        "n_constr": 1,
-        "tol": 1e-5,
-        "fun": lambda x: x**3,
-        "jac": lambda x: 3 * x**2,
-    }
-]
-```
-
-**Equality.** Internal equality constraints assume that the constraint is met when the
-function is zero. That is
-
-$$
-0 = g(x) \in \mathbb{R}^m .
-$$
-
-**Inequality.** Internal inequality constraints assume that the constraint is met when
-the function is greater or equal to zero. That is
-
-$$
-0 \leq g(x) \in \mathbb{R}^m .
-$$
-
-## Other conventions
-
-- Internal optimizer are functions and should thus adhere to python naming conventions,
-  for functions (i.e. only consist of lowercase letters and individual words should be
-  separated by underscores). For optimizers that are implemented in many packages (e.g.
-  Nelder Mead or BFGS), the name of the original package in which it was implemented has
-  to be part of the name.
-- All arguments of an internal optimizer should actually be used. In particular, if an
-  optimizer does not support bounds it should not have `lower_bounds` and `upper_bounds`
-  as arguments; derivative free optimizers should not have `derivative` or
-  `criterion_and_derivative` as arguments, etc.
+(to be written)

diff --git a/pyproject.toml b/pyproject.toml
index 4a5afe978..655148e12 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,23 +14,28 @@ dependencies = [
     "pybaum>=0.1.2",
     "scipy>=1.2.1",
     "sqlalchemy>=1.3",
+    "annotated-types",
+    "typing-extensions",
+    "nlopt",
 ]
 dynamic = ["version"]
 keywords = [
-    "econometrics",
+    "nonlinear optimization",
+    "optimization",
+    "derivative free optimization",
+    "global optimization",
+    "parallel optimization",
     "statistics",
     "estimation",
     "extremum estimation",
-    "optimization",
     "inference",
     "numerical differentiation",
     "finite differences",
-    "derivative free optimization",
     "method of simulated moments",
     "maximum likelihood",
 ]
 classifiers = [
-    "Development Status :: 4 - Beta",
+    "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Science/Research",
     "License :: OSI Approved :: MIT License",
     "Operating System :: MacOS :: MacOS X",
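
For users updating existing code, the following minimal sketch illustrates the SciPy-aligned calling convention and the new `Bounds` object described in the changelog entries for {gh}`504` and {gh}`506`. It is an illustration only: the `bounds` keyword, the `lower`/`upper` arguments of `Bounds`, and the SciPy-style `x`/`fun` attributes on the result are assumptions inferred from those entries and may differ in detail from the released API.

```python
# Hedged migration sketch (not part of the patch above); assumptions are marked.
import numpy as np
import optimagic as om


def sphere(x):
    """A plain SciPy-style scalar objective."""
    return x @ x


res = om.minimize(
    fun=sphere,                     # SciPy-style argument names per {gh}`504`
    x0=np.array([1.0, 2.0, 3.0]),   # start parameters as a 1d array
    method="L-BFGS-B",              # unchanged SciPy method strings are said to work
    # Assumption: the new Bounds object is passed via a `bounds` keyword and
    # takes `lower` / `upper` arrays, replacing lower_bounds / upper_bounds.
    bounds=om.Bounds(lower=np.full(3, -5.0), upper=np.full(3, 5.0)),
)

# Assumption: OptimizeResult exposes SciPy-like aliases such as `x` and `fun`.
print(res.x, res.fun)
```

As described in the deprecation list, the deprecated `scaling_options`, `multistart_options` and `log_options` dictionaries likewise have typed counterparts (`ScalingOptions`, `MultistartOptions`, `LogOptions`) that are passed under the `scaling`, `multistart` and `logging` arguments.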