Commit

Merge pull request #1834 from ranaroussi/dev
sync dev -> main
ValueRaider authored Jan 21, 2024
2 parents 176c3d6 + 2630c66 commit b837c1e
Showing 8 changed files with 66 additions and 30 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -55,7 +55,7 @@ $ pip install yfinance --upgrade --no-cache-dir
To install with optional dependencies, replace `optional` with: `nospam` for [caching-requests](#smarter-scraping), `repair` for [price repair](https://github.com/ranaroussi/yfinance/wiki/Price-repair), or `nospam,repair` for both:

``` {.sourceCode .bash}
$ pip install yfinance[optional]
$ pip install "yfinance[optional]"
```

[Required dependencies](./requirements.txt) , [all dependencies](./setup.py#L62).
2 changes: 1 addition & 1 deletion requirements.txt
@@ -8,4 +8,4 @@ pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
peewee>=3.16.2
peewee>=3.16.2
20 changes: 18 additions & 2 deletions tests/ticker.py
@@ -112,8 +112,24 @@ def test_badTicker(self):
dat.fast_info[k]

for attribute_name, attribute_type in ticker_attributes:
assert_attribute_type(self, dat, attribute_name, attribute_type)

assert_attribute_type(self, dat, attribute_name, attribute_type)

with self.assertRaises(YFNotImplementedError):
assert isinstance(dat.earnings, pd.Series)
assert dat.earnings.empty
assert isinstance(dat.dividends, pd.Series)
assert dat.dividends.empty
assert isinstance(dat.splits, pd.Series)
assert dat.splits.empty
assert isinstance(dat.capital_gains, pd.Series)
assert dat.capital_gains.empty
with self.assertRaises(YFNotImplementedError):
assert isinstance(dat.shares, pd.DataFrame)
assert dat.shares.empty
assert isinstance(dat.actions, pd.DataFrame)
assert dat.actions.empty


def test_goodTicker(self):
# that yfinance works when full api is called on same instance of ticker

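The expanded bad-ticker test pins down the new fallback behaviour: history-derived accessors should return empty pandas objects rather than plain lists. A minimal sketch of what the test asserts, outside the unittest harness (the symbol is a placeholder for any ticker Yahoo does not recognise):

```python
import pandas as pd
import yfinance as yf

dat = yf.Ticker("INVALID-TICKER-XYZ")  # placeholder bad symbol

# Series-returning accessors come back empty instead of as plain lists
assert isinstance(dat.dividends, pd.Series) and dat.dividends.empty
assert isinstance(dat.splits, pd.Series) and dat.splits.empty
assert isinstance(dat.capital_gains, pd.Series) and dat.capital_gains.empty

# actions is still expected to be an (empty) DataFrame
assert isinstance(dat.actions, pd.DataFrame) and dat.actions.empty
```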
37 changes: 21 additions & 16 deletions yfinance/base.py
@@ -27,7 +27,7 @@
import logging
import time as _time
import warnings
from typing import Optional
from typing import Optional, Union
from urllib.parse import quote as urlencode

import dateutil as _dateutil
@@ -42,7 +42,10 @@
from .scrapers.holders import Holders
from .scrapers.quote import Quote, FastInfo

from .const import _BASE_URL_, _ROOT_URL_
from .const import _BASE_URL_, _ROOT_URL_, price_colnames


_empty_series = pd.Series()


class TickerBase:
@@ -426,7 +429,9 @@ def history(self, period="1mo", interval="1d",
if not actions:
df = df.drop(columns=["Dividends", "Stock Splits", "Capital Gains"], errors='ignore')
if not keepna:
mask_nan_or_zero = (df.isna() | (df == 0)).all(axis=1)
data_colnames = price_colnames + ['Volume'] + ['Dividends', 'Stock Splits', 'Capital Gains']
data_colnames = [c for c in data_colnames if c in df.columns]
mask_nan_or_zero = (df[data_colnames].isna() | (df[data_colnames] == 0)).all(axis=1)
df = df.drop(mask_nan_or_zero.index[mask_nan_or_zero])

logger.debug(f'{self.ticker}: yfinance returning OHLC: {df.index[0]} -> {df.index[-1]}')
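With `keepna=False`, the row filter now looks only at the actual data columns (prices, volume, dividends, splits, capital gains), so an unrelated column cannot stop an all-zero row from being dropped. A small pandas sketch of the masking logic, using an illustrative non-data column:

```python
import pandas as pd

df = pd.DataFrame({
    "Open":   [10.0, 0.0],
    "High":   [11.0, 0.0],
    "Low":    [9.0, 0.0],
    "Close":  [10.5, 0.0],
    "Volume": [1000, 0],
    "Dividends": [0.0, 0.0],
    "Repaired?": [False, True],   # illustrative non-data column
})

data_colnames = ["Open", "High", "Low", "Close", "Adj Close", "Volume",
                 "Dividends", "Stock Splits", "Capital Gains"]
data_colnames = [c for c in data_colnames if c in df.columns]

# A row is dropped only when every *data* column is NaN or zero; the old all-column
# mask would have kept row 1 because "Repaired?" is True there.
mask_nan_or_zero = (df[data_colnames].isna() | (df[data_colnames] == 0)).all(axis=1)
print(df.drop(mask_nan_or_zero.index[mask_nan_or_zero]))   # only row 0 remains
```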
@@ -455,7 +460,7 @@ def _reconstruct_intervals_batch(self, df, interval, prepost, tag=-1):
else:
intraday = True

price_cols = [c for c in ["Open", "High", "Low", "Close", "Adj Close"] if c in df]
price_cols = [c for c in price_colnames if c in df]
data_cols = price_cols + ["Volume"]

# If interval is weekly then can construct with daily. But if smaller intervals then
@@ -1011,7 +1016,7 @@ def _fix_zeroes(self, df, interval, tz_exchange, prepost):
elif df2.index.tz != tz_exchange:
df2.index = df2.index.tz_convert(tz_exchange)

price_cols = [c for c in ["Open", "High", "Low", "Close", "Adj Close"] if c in df2.columns]
price_cols = [c for c in price_colnames if c in df2.columns]
f_prices_bad = (df2[price_cols] == 0.0) | df2[price_cols].isna()
df2_reserve = None
if intraday:
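This hunk and the one above replace hard-coded OHLC lists with the shared `price_colnames` constant from `yfinance.const`. Judging from the removed lines, the constant presumably covers the standard price columns; the defensive filter keeps the code working when a column such as "Adj Close" is absent:

```python
import pandas as pd

# Presumably what yfinance.const.price_colnames contains, based on the removed list.
price_colnames = ["Open", "High", "Low", "Close", "Adj Close"]

df = pd.DataFrame({"Open": [1.0], "High": [1.2], "Low": [0.9], "Close": [1.1], "Volume": [100]})

# Keep only the price columns actually present in the frame.
price_cols = [c for c in price_colnames if c in df.columns]
print(price_cols)   # ['Open', 'High', 'Low', 'Close']
```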
@@ -1916,7 +1921,7 @@ def get_balance_sheet(self, proxy=None, as_dict=False, pretty=False, freq="yearl
def get_balancesheet(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
return self.get_balance_sheet(proxy, as_dict, pretty, freq)

def get_cash_flow(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
def get_cash_flow(self, proxy=None, as_dict=False, pretty=False, freq="yearly") -> Union[pd.DataFrame, dict]:
"""
:Parameters:
as_dict: bool
@@ -1946,31 +1951,31 @@ def get_cash_flow(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
def get_cashflow(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
return self.get_cash_flow(proxy, as_dict, pretty, freq)

def get_dividends(self, proxy=None):
def get_dividends(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history:
dividends = self._history["Dividends"]
return dividends[dividends != 0]
return []
return pd.Series()

def get_capital_gains(self, proxy=None):
def get_capital_gains(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Capital Gains" in self._history:
capital_gains = self._history["Capital Gains"]
return capital_gains[capital_gains != 0]
return []
return _empty_series

def get_splits(self, proxy=None):
def get_splits(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Stock Splits" in self._history:
splits = self._history["Stock Splits"]
return splits[splits != 0]
return []
return pd.Series()

def get_actions(self, proxy=None):
def get_actions(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history and "Stock Splits" in self._history:
@@ -1979,9 +1984,9 @@ def get_actions(self, proxy=None):
action_columns.append("Capital Gains")
actions = self._history[action_columns]
return actions[actions != 0].dropna(how='all').fillna(0)
return []
return _empty_series

def get_shares(self, proxy=None, as_dict=False):
def get_shares(self, proxy=None, as_dict=False) -> Union[pd.DataFrame, dict]:
self._fundamentals.proxy = proxy or self.proxy
data = self._fundamentals.shares
if as_dict:
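The dividend, capital-gains, split and actions getters now fall back to an empty `pd.Series` instead of an empty list, so callers get a consistent pandas interface whether or not the ticker has any history. A hedged usage sketch (the symbol is illustrative):

```python
import yfinance as yf

divs = yf.Ticker("SOME-TICKER").get_dividends()   # illustrative symbol

# The fallback is now an empty Series, so pandas methods are always available;
# with the old `return []` these calls could fail on the list fallback.
print(divs.empty)
print(divs.sum())        # 0 for an empty Series
print(divs.tail(4))      # safe even with no dividend history
```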
@@ -2078,7 +2083,7 @@ def get_isin(self, proxy=None) -> Optional[str]:
self._isin = data.split(search_str)[1].split('"')[0].split('|')[0]
return self._isin

def get_news(self, proxy=None):
def get_news(self, proxy=None) -> list:
if self._news:
return self._news

18 changes: 16 additions & 2 deletions yfinance/cache.py
@@ -145,7 +145,14 @@ def initialise(self):

db.connect()
tz_db_proxy.initialize(db)
db.create_tables([_KV])
try:
db.create_tables([_KV])
except _peewee.OperationalError as e:
if 'WITHOUT' in str(e):
_KV._meta.without_rowid = False
db.create_tables([_KV])
else:
raise
self.initialised = 1 # success

def lookup(self, key):
@@ -344,7 +351,14 @@ def initialise(self):

db.connect()
Cookie_db_proxy.initialize(db)
db.create_tables([_CookieSchema])
try:
db.create_tables([_CookieSchema])
except _peewee.OperationalError as e:
if 'WITHOUT' in str(e):
_CookieSchema._meta.without_rowid = False
db.create_tables([_CookieSchema])
else:
raise
self.initialised = 1 # success

def lookup(self, strategy):
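Both cache tables are declared `WITHOUT ROWID`, which very old SQLite builds reject with an OperationalError mentioning the keyword; the new try/except retries creation as an ordinary rowid table. A self-contained peewee sketch of the same fallback (the model and database names are illustrative, not the ones yfinance uses):

```python
import peewee

db = peewee.SqliteDatabase(":memory:")

class KV(peewee.Model):          # illustrative stand-in for the cache schema
    key = peewee.CharField(primary_key=True)
    value = peewee.CharField(null=True)

    class Meta:
        database = db
        without_rowid = True     # unsupported by very old SQLite versions

db.connect()
try:
    db.create_tables([KV])
except peewee.OperationalError as e:
    if "WITHOUT" in str(e):
        # SQLite too old for WITHOUT ROWID: fall back to a normal rowid table.
        KV._meta.without_rowid = False
        db.create_tables([KV])
    else:
        raise
```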
4 changes: 2 additions & 2 deletions yfinance/multi.py
@@ -217,11 +217,11 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_

try:
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
keys=shared._DFS.keys())
keys=shared._DFS.keys(), names=['Ticker', 'Price'])
except Exception:
_realign_dfs()
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
keys=shared._DFS.keys())
keys=shared._DFS.keys(), names=['Ticker', 'Price'])
data.index = _pd.to_datetime(data.index)
# switch names back to isins if applicable
data.rename(columns=shared._ISINS, inplace=True)
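Passing `names=['Ticker', 'Price']` to `pd.concat` gives the multi-ticker result named column levels, so callers can select by level name instead of by position. A hedged usage sketch (the symbols are examples, and the final level order still depends on the `group_by` argument handled further down):

```python
import yfinance as yf

data = yf.download(["AAPL", "MSFT"], period="1mo")

print(data.columns.names)                       # e.g. ['Price', 'Ticker'] or ['Ticker', 'Price']
aapl = data.xs("AAPL", axis=1, level="Ticker")  # all price fields for one symbol
print(aapl.head())
```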
7 changes: 4 additions & 3 deletions yfinance/scrapers/quote.py
@@ -733,10 +733,11 @@ def _fetch_complementary(self, proxy):

json_str = self._data.cache_get(url=url, proxy=proxy).text
json_data = json.loads(json_str)
if json_data["timeseries"]["error"] is not None:
raise YFinanceException("Failed to parse json response from Yahoo Finance: " + json_data["error"])
json_result = json_data.get("timeseries") or json_data.get("finance")
if json_result["error"] is not None:
raise YFinanceException("Failed to parse json response from Yahoo Finance: " + str(json_result["error"]))
for k in keys:
keydict = json_data["timeseries"]["result"][0]
keydict = json_result["result"][0]
if k in keydict:
self._info[k] = keydict[k][-1]["reportedValue"]["raw"]
else:
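The error check now accepts the payload under either the "timeseries" or the "finance" key, and stringifies the error object before concatenating it (the removed line looked up `json_data["error"]` at the wrong level, and concatenating a dict onto a string would itself raise a TypeError). A small sketch of the fallback pattern with an illustrative payload:

```python
import json

# Illustrative payload shaped like an error response under the "finance" key.
json_str = '{"finance": {"result": null, "error": {"code": "Bad Request", "description": "..."}}}'
json_data = json.loads(json_str)

# Take whichever top-level key is present.
json_result = json_data.get("timeseries") or json_data.get("finance")
if json_result["error"] is not None:
    # str() is needed because the error is a dict, not a string.
    print("Failed to parse json response from Yahoo Finance: " + str(json_result["error"]))
```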
6 changes: 3 additions & 3 deletions yfinance/ticker.py
@@ -134,7 +134,7 @@ def dividends(self) -> _pd.Series:
return self.get_dividends()

@property
def capital_gains(self):
def capital_gains(self) -> _pd.Series:
return self.get_capital_gains()

@property
@@ -146,7 +146,7 @@ def actions(self) -> _pd.DataFrame:
return self.get_actions()

@property
def shares(self) -> _pd.DataFrame :
def shares(self) -> _pd.DataFrame:
return self.get_shares()

@property
@@ -259,7 +259,7 @@ def options(self) -> tuple:
return tuple(self._expirations.keys())

@property
def news(self):
def news(self) -> list:
return self.get_news()

@property
