Merge pull request #39 from martinvonk/dev
Update main to v0.4.2
martinvonk authored Apr 29, 2024
2 parents 2549eab + a39b4bd commit a422933
Showing 5 changed files with 72 additions and 14 deletions.
4 changes: 2 additions & 2 deletions doc/examples/example01_indices.ipynb
@@ -27,7 +27,7 @@
"text": [
"Versions\n",
"python: 3.10.12\n",
"spei: 0.4.0\n",
"spei: 0.4.1\n",
"numpy: 1.26.4\n",
"scipy: 1.12.0\n",
"matplotlib: 3.8.3\n",
@@ -131,7 +131,7 @@
],
"source": [
"f = 90 # days\n",
"series = prec.rolling(f\"{f}D\", min_periods=f).sum().dropna()\n",
"series = prec.rolling(f, min_periods=f).sum().dropna()\n",
"series"
]
},
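The change above (mirrored in src/spei/si.py further down) replaces a time-offset rolling window (f"{f}D") with a count-based window of f observations. On a gap-free daily index the two agree; they diverge when dates are missing. A minimal sketch of the difference, using hypothetical data:

import numpy as np
import pandas as pd

# Hypothetical daily precipitation series, for illustration only.
idx = pd.date_range("2000-01-01", periods=365, freq="D")
prec = pd.Series(np.random.default_rng(0).gamma(2.0, 2.0, size=idx.size), index=idx)

f = 90  # days
# Offset-based window: sums observations from the last 90 calendar days.
by_offset = prec.rolling(f"{f}D", min_periods=f).sum().dropna()
# Count-based window (as of this commit): sums the last 90 observations.
by_count = prec.rolling(f, min_periods=f).sum().dropna()

# Identical on a gap-free daily index; not on an irregular one.
assert np.allclose(by_offset.values, by_count.values)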
2 changes: 1 addition & 1 deletion src/spei/_version.py
@@ -1,7 +1,7 @@
from importlib import metadata
from platform import python_version

__version__ = "0.4.1"
__version__ = "0.4.2"


def show_versions() -> str:
4 changes: 2 additions & 2 deletions src/spei/dist.py
@@ -102,7 +102,7 @@ def cdf(self) -> Series:
"""Compute cumulative density function of a Scipy Continuous Distribution"""
if self.pars is not None:
cdf = self.dist.cdf(
- self.data.values, self.pars, loc=self.loc, scale=self.scale
+ self.data.values, *self.pars, loc=self.loc, scale=self.scale
)
else:
cdf = self.dist.cdf(self.data.values, loc=self.loc, scale=self.scale)
@@ -117,7 +117,7 @@ def pdf(self) -> Series:
data_pdf = self.data.sort_values()
if self.pars is not None:
pdf = self.dist.pdf(
- data_pdf.values, self.pars, loc=self.loc, scale=self.scale
+ data_pdf.values, *self.pars, loc=self.loc, scale=self.scale
)
else:
pdf = self.dist.pdf(data_pdf.values, loc=self.loc, scale=self.scale)
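Why the unpacking fix matters: SciPy's continuous distributions take shape parameters as separate positional arguments, so a tuple of fitted shape parameters must be unpacked with *. A minimal sketch with hypothetical data (fisk is the log-logistic distribution):

import numpy as np
from scipy.stats import fisk  # log-logistic, one shape parameter 'c'

data = np.array([0.8, 1.3, 2.1, 3.4, 5.0])
c, loc, scale = fisk.fit(data)
pars = (c,)  # shape parameters, kept separate from loc and scale

# Correct: shape parameters unpacked into positional arguments.
cdf = fisk.cdf(data, *pars, loc=loc, scale=scale)

# The old call passed the tuple itself as the shape argument. With a single
# shape parameter, NumPy broadcasting can mask the bug; with two or more
# shape parameters the call fails outright.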
72 changes: 65 additions & 7 deletions src/spei/si.py
@@ -30,6 +30,10 @@ def sgi(
series: pandas.Series
Pandas time series of the groundwater levels. Time series index
should be a pandas DatetimeIndex.
+ fit_freq : str, optional, default=None
+     Frequency for fitting the distribution. Default is None, in which case
+     the frequency of the series is inferred. If this fails, a monthly
+     frequency is used.
Returns
-------
@@ -75,9 +79,26 @@ def spi(
However, for the SPI generally the Gamma probability density
function is recommended. Other appropriate choices could be the
lognormal, log-logistic (fisk) or PearsonIII distribution.
- prob_zero: bool
-     Option to correct the distribution if x=0 is not in probability
-     density function. E.g. the case with the Gamma distriubtion.
+ timescale : int, optional, default=0
+     Size of the moving window over which the series is summed. If zero, no
+     summation is performed over the time series. If the time series
+     frequency is daily, then one would provide timescale=30 for SI1,
+     timescale=90 for SI3, timescale=180 for SI6, etc.
+ fit_freq : str, optional, default=None
+     Frequency for fitting the distribution. Default is None, in which case
+     the frequency of the series is inferred. If this fails, a monthly
+     frequency is used.
+ fit_window : int, optional, default=0
+     Window size for fitting data, in units of the fit_freq frequency.
+     Default is zero, in which case only data within the fit_freq period is
+     considered. If larger than zero, data within the window is used to fit
+     the distribution for the series. When used, fit_window must be an odd
+     number larger than 3.
+ prob_zero : bool, default=True
+     Option to correct the distribution if x=0 is not in the probability
+     density function, e.g. the case with the Gamma distribution. If True,
+     the probability of zero values in the series is calculated from their
+     occurrence.
Returns
-------
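Taken together, the new spi keywords could be used along these lines; a hedged sketch in which prec is a hypothetical daily precipitation series, the parameter values are illustrative, and the package is assumed importable as spei:

import scipy.stats as scs
import spei as si

# prec: pandas Series of daily precipitation with a DatetimeIndex.
spi3 = si.spi(
    prec,
    dist=scs.gamma,  # recommended distribution for the SPI
    timescale=90,    # 90-day rolling sum -> SI3 on a daily series
    fit_freq="ME",   # fit a distribution per calendar month (pandas >= 2.2 alias)
    prob_zero=True,  # account for zero-precipitation values
)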
@@ -124,6 +145,24 @@ def spei(
However, for the SPEI generally the log-logistic (fisk) probability
density function is recommended. Other appropriate choices could be
the lognormal or PearsonIII distribution.
+ timescale : int, optional, default=0
+     Size of the moving window over which the series is summed. If zero, no
+     summation is performed over the time series. If the time series
+     frequency is daily, then one would provide timescale=30 for SI1,
+     timescale=90 for SI3, timescale=180 for SI6, etc.
+ fit_freq : str, optional, default=None
+     Frequency for fitting the distribution. Default is None, in which case
+     the frequency of the series is inferred. If this fails, a monthly
+     frequency is used.
+ fit_window : int, optional, default=0
+     Window size for fitting data, in units of the fit_freq frequency.
+     Default is zero, in which case only data within the fit_freq period is
+     considered. If larger than zero, data within the window is used to fit
+     the distribution for the series. When used, fit_window must be an odd
+     number larger than 3.
+ prob_zero : bool, default=False
+     Flag indicating whether the probability of zero values in the series
+     is calculated from their occurrence.
Returns
-------
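For the SPEI the input is typically the climatic water balance, precipitation minus potential evapotranspiration. A minimal sketch under the same assumptions as the spi example above, with prec and pet as hypothetical daily series:

import scipy.stats as scs
import spei as si

# Climatic water balance: precipitation minus potential evapotranspiration.
surplus = (prec - pet).dropna()

spei6 = si.spei(
    surplus,
    dist=scs.fisk,  # log-logistic, recommended for the SPEI
    timescale=180,  # 180-day rolling sum -> SI6 on a daily series
)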
@@ -170,6 +209,24 @@ def ssfi(
However, for the SSFI generally the gamma probability density
function is recommended. Other appropriate choices could be the
normal, lognormal, pearsonIII, GEV or Gen-Logistic distribution.
+ timescale : int, optional, default=0
+     Size of the moving window over which the series is summed. If zero, no
+     summation is performed over the time series. If the time series
+     frequency is daily, then one would provide timescale=30 for SI1,
+     timescale=90 for SI3, timescale=180 for SI6, etc.
+ fit_freq : str, optional, default=None
+     Frequency for fitting the distribution. Default is None, in which case
+     the frequency of the series is inferred. If this fails, a monthly
+     frequency is used.
+ fit_window : int, optional, default=0
+     Window size for fitting data, in units of the fit_freq frequency.
+     Default is zero, in which case only data within the fit_freq period is
+     considered. If larger than zero, data within the window is used to fit
+     the distribution for the series. When used, fit_window must be an odd
+     number larger than 3.
+ prob_zero : bool, default=False
+     Flag indicating whether the probability of zero values in the series
+     is calculated from their occurrence.
Returns
-------
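ssfi accepts the same keywords. A sketch that also exercises fit_window, which must be an odd number larger than 3 when used; discharge is a hypothetical daily streamflow series:

import scipy.stats as scs
import spei as si

ssfi3 = si.ssfi(
    discharge,
    dist=scs.gamma,  # recommended distribution for the SSFI
    timescale=90,    # 90-day rolling sum -> SI3 on a daily series
    fit_window=5,    # pool data from five fit_freq periods per fit
)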
@@ -207,9 +264,10 @@ class SI:
dist : ContinuousDist
The SciPy continuous distribution associated with the data.
timescale : int, optional, default=0
-     Rolling window timescale in days over which the series is summed. For
-     SI1 the user would provide timescale=30, for SI3: timescale=90, SI6:
-     timescale=180 etc.
+     Size of the moving window over which the series is summed. If zero, no
+     summation is performed over the time series. If the time series
+     frequency is daily, then one would provide timescale=30 for SI1,
+     timescale=90 for SI3, timescale=180 for SI6, etc.
fit_freq : str, optional, default=None
Frequency for fitting the distribution. Default is None in which case
the frequency of the series is inferred. If this fails a monthly
@@ -258,7 +316,7 @@ def __post_init__(self) -> None:

if self.timescale > 0:
self.series = (
self.series.rolling(f"{self.timescale}D", min_periods=self.timescale)
self.series.rolling(self.timescale, min_periods=self.timescale)
.sum()
.dropna()
.copy()
4 changes: 2 additions & 2 deletions src/spei/utils.py
@@ -64,7 +64,7 @@ def infer_frequency(index: Union[Index, DatetimeIndex]) -> str:
logging.info(
"Could not infer frequency from index, using monthly frequency instead"
)
inf_freq = "ME" if pd_version >= "2.1.0" else "M"
inf_freq = "ME" if pd_version >= "2.2.0" else "M"
else:
logging.info(f"Inferred frequency '{inf_freq}' from index")

@@ -81,7 +81,7 @@ def group_yearly_df(series: Series) -> DataFrame:
"""
strfstr: str = "%m-%d %H:%M:%S"
grs = {}
freq = "YE" if pd_version >= "2.1.0" else "Y"
freq = "YE" if pd_version >= "2.2.0" else "Y"
for year_timestamp, gry in series.groupby(Grouper(freq=freq)):
index = validate_index(gry.index)
gry.index = to_datetime(
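Background for the version gate: pandas 2.2.0 renamed the month-end and year-end frequency aliases from "M" and "Y" to "ME" and "YE", so comparing against "2.1.0" switched to the new aliases one minor release too early. A sketch of the gating logic; the Version class from the third-party packaging module is an assumption here, used as a more robust alternative to the plain string comparison in the diff:

import pandas as pd
from packaging.version import Version  # third-party 'packaging' module

# "ME"/"YE" exist from pandas 2.2.0 onwards; older releases use "M"/"Y".
recent = Version(pd.__version__) >= Version("2.2.0")
month_end = "ME" if recent else "M"
year_end = "YE" if recent else "Y"

# Example: monthly totals of a daily series with the correct alias.
daily = pd.Series(1.0, index=pd.date_range("2023-11-15", periods=120))
monthly = daily.resample(month_end).sum()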
