[pre-commit.ci] pre-commit autoupdate (#463)
pre-commit-ci[bot] authored May 20, 2023
1 parent 68d9411 commit dfc968c
Showing 9 changed files with 17 additions and 33 deletions.
16 changes: 4 additions & 12 deletions .pre-commit-config.yaml
@@ -52,7 +52,7 @@ repos:
- id: check-docstring-first
exclude: src/estimagic/optimization/algo_options.py
- repo: https://github.com/adrienverge/yamllint.git
-rev: v1.30.0
+rev: v1.31.0
hooks:
- id: yamllint
exclude: tests/optimization/fixtures
@@ -67,7 +67,7 @@ repos:
- id: blacken-docs
exclude: docs/source/how_to_guides/optimization/how_to_specify_constraints.md
- repo: https://github.com/PyCQA/docformatter
-rev: v1.5.1
+rev: v1.6.4
hooks:
- id: docformatter
args:
@@ -77,19 +77,11 @@
- --wrap-descriptions
- '88'
- --blank
exclude: src/estimagic/optimization/algo_options.py
- repo: https://github.com/charliermarsh/ruff-pre-commit
-rev: v0.0.261
+rev: v0.0.263
hooks:
- id: ruff
-# args:
-# - --verbose
-# - repo: https://github.com/kynan/nbstripout
-# rev: 0.6.1
-# hooks:
-# - id: nbstripout
-# args:
-# - --extra-keys
-# - metadata.kernelspec metadata.language_info.version metadata.vscode
- repo: https://github.com/nbQA-dev/nbQA
rev: 1.7.0
hooks:
2 changes: 1 addition & 1 deletion src/estimagic/differentiation/derivatives.py
@@ -723,7 +723,7 @@ def _convert_richardson_candidates_to_frame(jac, err):
def _convert_evals_to_numpy(
raw_evals, key, registry, is_scalar_out=False, is_vector_out=False
):
"""harmonize the output of the function evaluations.
"""Harmonize the output of the function evaluations.
The raw_evals might contain dictionaries of which we only need one entry, scalar
np.nan where we need arrays filled with np.nan or pandas objects. The processed
1 change: 0 additions & 1 deletion src/estimagic/examples/numdiff_functions.py
@@ -4,7 +4,6 @@
Example inputs for the binary choice functions are in binary_choice_inputs.pickle. They
come from the statsmodels documentation:
https://tinyurl.com/y4x67vwl
We pickled them so we don't need statsmodels as a dependency.
2 changes: 1 addition & 1 deletion src/estimagic/inference/bootstrap_samples.py
@@ -75,7 +75,7 @@ def get_bootstrap_samples(data, rng, cluster_by=None, n_draws=1000):


def _get_bootstrap_samples_from_indices(data, bootstrap_indices):
"""convert bootstrap indices into actual bootstrap samples.
"""Convert bootstrap indices into actual bootstrap samples.
Args:
data (pandas.DataFrame): original dataset.
1 change: 0 additions & 1 deletion src/estimagic/optimization/algo_options.py
@@ -187,7 +187,6 @@
"""


"""
-------------------------
Trust Region Parameters
8 changes: 1 addition & 7 deletions src/estimagic/optimization/tiktak.py
@@ -3,16 +3,10 @@
TikTak (`Arnoud, Guvenen, and Kleineberg
<https://www.nber.org/system/files/working_papers/w26340/w26340.pdf>`_)
-is an algorithm for solving global optimization problems. It performs local searches
+is an algorithm for solving global optimization problems. It performs local searches
from a set of carefully-selected points in the parameter space.
First implemented in Python by Alisdair McKay (
`GitHub Repository <https://github.com/amckay/TikTak>`_)
"""
2 changes: 1 addition & 1 deletion src/estimagic/parameters/consolidate_constraints.py
@@ -215,7 +215,7 @@ def _consolidate_fixes_with_equality_constraints(
def _consolidate_bounds_with_equality_constraints(
equality_constraints, lower_bounds, upper_bounds
):
"""consolidate bounds with equality constraints.
"""Consolidate bounds with equality constraints.
Check that there are no incompatible bounds on equality constrained parameters and
set the bounds for equal parameters to the strictest bound encountered on any of
12 changes: 6 additions & 6 deletions src/estimagic/sensitivity/msm_sensitivity.py
@@ -16,7 +16,7 @@


def calculate_sensitivity_to_bias(jac, weights):
"""calculate the sensitivity to bias.
"""Calculate the sensitivity to bias.
The sensitivity measure is calculated for each parameter wrt each moment.
@@ -47,7 +47,7 @@ def calculate_sensitivity_to_bias(jac, weights):
def calculate_fundamental_sensitivity_to_noise(
jac, weights, moments_cov, params_cov_opt
):
"""calculate the fundamental sensitivity to noise.
"""Calculate the fundamental sensitivity to noise.
The sensitivity measure is calculated for each parameter wrt each moment.
@@ -104,7 +104,7 @@ def calculate_fundamental_sensitivity_to_noise(
def calculate_actual_sensitivity_to_noise(
sensitivity_to_bias, weights, moments_cov, params_cov
):
"""calculate the actual sensitivity to noise.
"""Calculate the actual sensitivity to noise.
The sensitivity measure is calculated for each parameter wrt each moment.
@@ -159,7 +159,7 @@ def calculate_actual_sensitivity_to_noise(


def calculate_actual_sensitivity_to_removal(jac, weights, moments_cov, params_cov):
"""calculate the actual sensitivity to removal.
"""Calculate the actual sensitivity to removal.
The sensitivity measure is calculated for each parameter wrt each moment.
@@ -211,7 +211,7 @@ def calculate_actual_sensitivity_to_removal(jac, weights, moments_cov, params_co


def calculate_fundamental_sensitivity_to_removal(jac, moments_cov, params_cov_opt):
"""calculate the fundamental sensitivity to removal.
"""Calculate the fundamental sensitivity to removal.
The sensitivity measure is calculated for each parameter wrt each moment.
@@ -269,7 +269,7 @@ def calculate_fundamental_sensitivity_to_removal(jac, moments_cov, params_cov_op


def calculate_sensitivity_to_weighting(jac, weights, moments_cov, params_cov):
"""calculate the sensitivity to weighting.
"""Calculate the sensitivity to weighting.
The sensitivity measure is calculated for each parameter wrt each moment.
6 changes: 3 additions & 3 deletions tests/optimization/test_optimize.py
@@ -3,7 +3,7 @@
import pandas as pd
import pytest
from estimagic.examples.criterion_functions import sos_scalar_criterion
-from estimagic.exceptions import InvalidKwargsError
+from estimagic.exceptions import InvalidKwargsError, InvalidFunctionError
from estimagic.optimization.optimize import maximize, minimize


@@ -23,9 +23,9 @@ def test_scipy_lbfgsb_actually_calls_criterion_and_derivative():
params = pd.DataFrame(data=np.ones((10, 1)), columns=["value"])

def raising_crit_and_deriv(params): # noqa: ARG001
-raise Exception()
+raise NotImplementedError("This should not be called.")

-with pytest.raises(Exception):
+with pytest.raises(InvalidFunctionError, match="Error while evaluating"):
minimize(
criterion=sos_scalar_criterion,
params=params,
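For context on the last change: the updated test expects a failure inside a user-supplied criterion to surface as estimagic's `InvalidFunctionError` with a message matching "Error while evaluating", instead of catching a bare `Exception`. The sketch below illustrates that wrap-and-re-raise pattern in isolation; `safe_evaluate` and the local `InvalidFunctionError` class are hypothetical stand-ins for illustration, not estimagic's actual internals.

```python
import pytest


class InvalidFunctionError(Exception):
    """Stand-in for estimagic.exceptions.InvalidFunctionError."""


def safe_evaluate(criterion, params):
    # Re-raise any failure from the user-supplied criterion as a single,
    # well-defined exception type so callers (and tests) can catch it.
    try:
        return criterion(params)
    except Exception as e:
        raise InvalidFunctionError(
            f"Error while evaluating criterion function: {e!r}"
        ) from e


def test_error_is_wrapped():
    def raising_criterion(params):  # mirrors raising_crit_and_deriv in the diff
        raise NotImplementedError("This should not be called.")

    with pytest.raises(InvalidFunctionError, match="Error while evaluating"):
        safe_evaluate(raising_criterion, params={"x": 1.0})
```

Asserting on one library-specific exception and a message fragment, rather than `pytest.raises(Exception)`, is the stricter check the updated test introduces.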
