Mirror of https://github.com/ranaroussi/yfinance.git, synced 2024-01-29 09:38:56 +03:00
@@ -55,7 +55,7 @@ $ pip install yfinance --upgrade --no-cache-dir

To install with optional dependencies, replace `optional` with: `nospam` for [caching-requests](#smarter-scraping), `repair` for [price repair](https://github.com/ranaroussi/yfinance/wiki/Price-repair), or `nospam,repair` for both:

``` {.sourceCode .bash}
-$ pip install yfinance[optional]
+$ pip install "yfinance[optional]"
```

[Required dependencies](./requirements.txt), [all dependencies](./setup.py#L62).
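For example, installing with both optional extras looks like this (the quotes keep the shell from expanding the brackets, which is exactly what the change above fixes):

``` {.sourceCode .bash}
$ pip install "yfinance[nospam,repair]"
```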
@@ -8,4 +8,4 @@ pytz>=2022.5
frozendict>=2.3.4
beautifulsoup4>=4.11.1
html5lib>=1.1
-peewee>=3.16.2
+peewee>=3.16.2
@@ -112,8 +112,24 @@ class TestTicker(unittest.TestCase):
dat.fast_info[k]

for attribute_name, attribute_type in ticker_attributes:
assert_attribute_type(self, dat, attribute_name, attribute_type)

assert_attribute_type(self, dat, attribute_name, attribute_type)

with self.assertRaises(YFNotImplementedError):
assert isinstance(dat.earnings, pd.Series)
assert dat.earnings.empty
assert isinstance(dat.dividends, pd.Series)
assert dat.dividends.empty
assert isinstance(dat.splits, pd.Series)
assert dat.splits.empty
assert isinstance(dat.capital_gains, pd.Series)
assert dat.capital_gains.empty
with self.assertRaises(YFNotImplementedError):
assert isinstance(dat.shares, pd.DataFrame)
assert dat.shares.empty
assert isinstance(dat.actions, pd.DataFrame)
assert dat.actions.empty


def test_goodTicker(self):
# that yfinance works when full api is called on same instance of ticker
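The hunk above captures the new expectation for unusable symbols: history-derived accessors return empty pandas containers instead of raising. A minimal sketch of that behaviour (the symbol below is hypothetical and assumed to resolve to no Yahoo Finance data):

``` {.sourceCode .python}
import pandas as pd
import yfinance as yf

# Hypothetical symbol assumed to return no data from Yahoo Finance.
dat = yf.Ticker("INVALID-TICKER-XYZ")

# With this change these properties come back as empty Series rather than
# raising, so isinstance checks and .empty are always safe to call.
assert isinstance(dat.dividends, pd.Series) and dat.dividends.empty
assert isinstance(dat.splits, pd.Series) and dat.splits.empty
assert isinstance(dat.capital_gains, pd.Series) and dat.capital_gains.empty
```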
@@ -27,7 +27,7 @@ import json as _json
import logging
import time as _time
import warnings
-from typing import Optional
+from typing import Optional, Union
from urllib.parse import quote as urlencode

import dateutil as _dateutil
@@ -42,7 +42,10 @@ from .scrapers.fundamentals import Fundamentals
from .scrapers.holders import Holders
from .scrapers.quote import Quote, FastInfo

-from .const import _BASE_URL_, _ROOT_URL_
+from .const import _BASE_URL_, _ROOT_URL_, price_colnames

+_empty_series = pd.Series()


class TickerBase:
@@ -426,7 +429,9 @@ class TickerBase:
if not actions:
df = df.drop(columns=["Dividends", "Stock Splits", "Capital Gains"], errors='ignore')
if not keepna:
-mask_nan_or_zero = (df.isna() | (df == 0)).all(axis=1)
+data_colnames = price_colnames + ['Volume'] + ['Dividends', 'Stock Splits', 'Capital Gains']
+data_colnames = [c for c in data_colnames if c in df.columns]
+mask_nan_or_zero = (df[data_colnames].isna() | (df[data_colnames] == 0)).all(axis=1)
df = df.drop(mask_nan_or_zero.index[mask_nan_or_zero])

logger.debug(f'{self.ticker}: yfinance returning OHLC: {df.index[0]} -> {df.index[-1]}')
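Why restricting the mask to data columns matters: a row should only be dropped when every price/volume/action value is missing or zero, not when some unrelated column happens to be falsy. A small standalone sketch of the new masking logic (the `Repaired?` column is an illustrative stand-in for any non-data column):

``` {.sourceCode .python}
import numpy as np
import pandas as pd

# Toy history frame; 'Repaired?' stands in for any non-data column.
df = pd.DataFrame({
    "Open": [10.0, np.nan], "High": [10.5, np.nan],
    "Low": [9.8, np.nan], "Close": [10.2, np.nan],
    "Volume": [1000, 0], "Repaired?": [False, False],
})

price_colnames = ["Open", "High", "Low", "Close", "Adj Close"]
data_colnames = price_colnames + ["Volume"] + ["Dividends", "Stock Splits", "Capital Gains"]
data_colnames = [c for c in data_colnames if c in df.columns]

# Drop a row only when every *data* column is NaN or zero.
mask_nan_or_zero = (df[data_colnames].isna() | (df[data_colnames] == 0)).all(axis=1)
df = df.drop(mask_nan_or_zero.index[mask_nan_or_zero])
print(len(df))  # 1 -- only the all-NaN/zero row was removed
```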
@@ -455,7 +460,7 @@ class TickerBase:
else:
intraday = True

-price_cols = [c for c in ["Open", "High", "Low", "Close", "Adj Close"] if c in df]
+price_cols = [c for c in price_colnames if c in df]
data_cols = price_cols + ["Volume"]

# If interval is weekly then can construct with daily. But if smaller intervals then
@@ -1011,7 +1016,7 @@ class TickerBase:
elif df2.index.tz != tz_exchange:
df2.index = df2.index.tz_convert(tz_exchange)

-price_cols = [c for c in ["Open", "High", "Low", "Close", "Adj Close"] if c in df2.columns]
+price_cols = [c for c in price_colnames if c in df2.columns]
f_prices_bad = (df2[price_cols] == 0.0) | df2[price_cols].isna()
df2_reserve = None
if intraday:
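Both hunks above swap a hard-coded column list for the shared `price_colnames` constant imported from `.const`, so the set of price columns is defined in one place. A minimal sketch of the filtering pattern (the constant's value is assumed to equal the list the old code inlined):

``` {.sourceCode .python}
import pandas as pd

# Assumed to mirror yfinance.const.price_colnames (the list the old code hard-coded).
price_colnames = ["Open", "High", "Low", "Close", "Adj Close"]

df = pd.DataFrame(columns=["Open", "High", "Low", "Close", "Volume"])

# Keep only the price columns actually present in this particular frame.
price_cols = [c for c in price_colnames if c in df.columns]
print(price_cols)  # ['Open', 'High', 'Low', 'Close']
```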
@@ -1916,7 +1921,7 @@ class TickerBase:
def get_balancesheet(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
return self.get_balance_sheet(proxy, as_dict, pretty, freq)

-def get_cash_flow(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
+def get_cash_flow(self, proxy=None, as_dict=False, pretty=False, freq="yearly") -> Union[pd.DataFrame, dict]:
"""
:Parameters:
as_dict: bool
@@ -1946,31 +1951,31 @@ class TickerBase:
def get_cashflow(self, proxy=None, as_dict=False, pretty=False, freq="yearly"):
return self.get_cash_flow(proxy, as_dict, pretty, freq)

-def get_dividends(self, proxy=None):
+def get_dividends(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history:
dividends = self._history["Dividends"]
return dividends[dividends != 0]
-return []
+return pd.Series()

-def get_capital_gains(self, proxy=None):
+def get_capital_gains(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Capital Gains" in self._history:
capital_gains = self._history["Capital Gains"]
return capital_gains[capital_gains != 0]
-return []
+return _empty_series

-def get_splits(self, proxy=None):
+def get_splits(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Stock Splits" in self._history:
splits = self._history["Stock Splits"]
return splits[splits != 0]
-return []
+return pd.Series()

-def get_actions(self, proxy=None):
+def get_actions(self, proxy=None) -> pd.Series:
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history and "Stock Splits" in self._history:
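The practical effect of replacing `return []` with an empty Series: callers always get a pandas object back, so Series methods work even when there is no data. A minimal sketch (the symbol is illustrative and assumed to have no recorded dividends):

``` {.sourceCode .python}
import yfinance as yf

# Illustrative symbol assumed to have paid no dividends.
divs = yf.Ticker("SOME-DIVIDENDLESS-TICKER").get_dividends()

# A Series comes back either way, so no list-vs-Series special casing:
print(divs.empty)  # True when there are no dividends
print(divs.sum())  # 0 for an empty Series, instead of failing on a list
```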
@@ -1979,9 +1984,9 @@ class TickerBase:
action_columns.append("Capital Gains")
actions = self._history[action_columns]
return actions[actions != 0].dropna(how='all').fillna(0)
-return []
+return _empty_series

-def get_shares(self, proxy=None, as_dict=False):
+def get_shares(self, proxy=None, as_dict=False) -> Union[pd.DataFrame, dict]:
self._fundamentals.proxy = proxy or self.proxy
data = self._fundamentals.shares
if as_dict:
@@ -2078,7 +2083,7 @@ class TickerBase:
self._isin = data.split(search_str)[1].split('"')[0].split('|')[0]
return self._isin

-def get_news(self, proxy=None):
+def get_news(self, proxy=None) -> list:
if self._news:
return self._news

@@ -145,7 +145,14 @@ class _TzCache:

db.connect()
tz_db_proxy.initialize(db)
-db.create_tables([_KV])
+try:
+db.create_tables([_KV])
+except _peewee.OperationalError as e:
+if 'WITHOUT' in str(e):
+_KV._meta.without_rowid = False
+db.create_tables([_KV])
+else:
+raise
self.initialised = 1 # success

def lookup(self, key):
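The pattern added here, in isolation: SQLite builds that predate `WITHOUT ROWID` support make `create_tables` raise `OperationalError`, so the cache retries with an ordinary rowid table. A standalone sketch against an in-memory database (the `KV` model is illustrative, not the module's own `_KV`):

``` {.sourceCode .python}
import peewee

db = peewee.SqliteDatabase(":memory:")

class KV(peewee.Model):
    key = peewee.CharField(primary_key=True)
    value = peewee.CharField(null=True)

    class Meta:
        database = db
        without_rowid = True  # preferred layout when SQLite supports it

db.connect()
try:
    db.create_tables([KV])
except peewee.OperationalError as e:
    # Older SQLite: fall back to a normal rowid table and retry once.
    if "WITHOUT" in str(e):
        KV._meta.without_rowid = False
        db.create_tables([KV])
    else:
        raise
```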
@@ -344,7 +351,14 @@ class _CookieCache:

db.connect()
Cookie_db_proxy.initialize(db)
-db.create_tables([_CookieSchema])
+try:
+db.create_tables([_CookieSchema])
+except _peewee.OperationalError as e:
+if 'WITHOUT' in str(e):
+_CookieSchema._meta.without_rowid = False
+db.create_tables([_CookieSchema])
+else:
+raise
self.initialised = 1 # success

def lookup(self, strategy):

@@ -217,11 +217,11 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_

try:
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
-keys=shared._DFS.keys())
+keys=shared._DFS.keys(), names=['Ticker', 'Price'])
except Exception:
_realign_dfs()
data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
-keys=shared._DFS.keys())
+keys=shared._DFS.keys(), names=['Ticker', 'Price'])
data.index = _pd.to_datetime(data.index)
# switch names back to isins if applicable
data.rename(columns=shared._ISINS, inplace=True)
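With `names=['Ticker', 'Price']` on the concat, the column MultiIndex levels carry labels, so downstream selection can refer to a level by name rather than by position. A small usage sketch (assuming the labels survive the later `group_by` handling; `group_by="ticker"` keeps the Ticker level outermost):

``` {.sourceCode .python}
import yfinance as yf

# Download a couple of symbols; columns form a two-level MultiIndex.
data = yf.download(["MSFT", "AAPL"], period="5d", group_by="ticker")

print(data.columns.names)  # with this change: ['Ticker', 'Price']

# Named levels allow selection by level name instead of position:
closes = data.xs("Close", axis=1, level="Price")
print(closes.tail())
```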
@@ -733,10 +733,11 @@ class Quote:

json_str = self._data.cache_get(url=url, proxy=proxy).text
json_data = json.loads(json_str)
-if json_data["timeseries"]["error"] is not None:
-raise YFinanceException("Failed to parse json response from Yahoo Finance: " + json_data["error"])
+json_result = json_data.get("timeseries") or json_data.get("finance")
+if json_result["error"] is not None:
+raise YFinanceException("Failed to parse json response from Yahoo Finance: " + str(json_result["error"]))
for k in keys:
-keydict = json_data["timeseries"]["result"][0]
+keydict = json_result["result"][0]
if k in keydict:
self._info[k] = keydict[k][-1]["reportedValue"]["raw"]
else:
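The parsing change in isolation: Yahoo wraps its payload under either a `timeseries` or a `finance` key depending on the endpoint, and the error object is stringified before concatenation. A minimal sketch with a made-up payload (a plain `RuntimeError` stands in for the module's `YFinanceException`):

``` {.sourceCode .python}
import json

def extract_result(json_str: str) -> list:
    """Return Yahoo's 'result' list, raising if the envelope reports an error."""
    json_data = json.loads(json_str)
    # The envelope key varies by endpoint: 'timeseries' or 'finance'.
    json_result = json_data.get("timeseries") or json_data.get("finance")
    if json_result["error"] is not None:
        raise RuntimeError("Failed to parse json response from Yahoo Finance: "
                           + str(json_result["error"]))
    return json_result["result"]

ok = '{"finance": {"error": null, "result": [{"quoteSummary": {}}]}}'
print(extract_result(ok))  # [{'quoteSummary': {}}]
```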
@@ -134,7 +134,7 @@ class Ticker(TickerBase):
return self.get_dividends()

@property
-def capital_gains(self):
+def capital_gains(self) -> _pd.Series:
return self.get_capital_gains()

@property

@@ -146,7 +146,7 @@ class Ticker(TickerBase):
return self.get_actions()

@property
-def shares(self) -> _pd.DataFrame :
+def shares(self) -> _pd.DataFrame:
return self.get_shares()

@property

@@ -259,7 +259,7 @@ class Ticker(TickerBase):
return tuple(self._expirations.keys())

@property
-def news(self):
+def news(self) -> list:
return self.get_news()

@property