mirror of https://github.com/ranaroussi/yfinance.git
synced 2024-01-29 09:38:56 +03:00
@@ -1,6 +1,34 @@
 Change Log
 ===========
 
+0.2.35
+------
+Internal fixes for 0.2.34
+
+0.2.34
+------
+Features:
+- Add Recommendations Trend Summary #1754
+- Add Recommendation upgrades & downgrades #1773
+- Add Insider Roster & Transactions #1772
+- Moved download() progress bar to STDERR #1776
+- PIP optional dependencies #1771
+- Set sensible min versions for optional 'nospam' reqs #1807
+Fixes
+- Fix download() DatetimeIndex on invalid symbols #1779
+- Fix invalid date entering cache DB #1796
+- Fix Ticker.calendar fetch #1790
+- Fixed adding complementary to info #1774
+- Ticker.earnings_dates: fix warning "Value 'NaN' has dtype incompatible with float64" #1810
+- Minor fixes for price repair and related tests #1768
+- Fix price repair div adjust #1798
+- Fix 'raise_errors' argument ignored in Ticker.history() #1806
+Maintenance
+- Fix regression: _get_ticker_tz() args were being swapped. Improve its unit test #1793
+- Refactor Ticker proxy #1711
+- Add Ruff linter checks #1756
+- Resolve Pandas FutureWarnings #1766
+
 0.2.33
 ------
 Cookie fixes:
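For orientation, a minimal sketch of how the 0.2.34 additions surface on a `Ticker` object (network access assumed; output shapes can vary):

```python
import yfinance as yf

msft = yf.Ticker("MSFT")

# New in 0.2.34 (#1754, #1773): recommendation trend and upgrade/downgrade history
print(msft.recommendations_summary)  # recommendation trend as a DataFrame
print(msft.upgrades_downgrades)      # grades DataFrame, indexed by date

# New in 0.2.34 (#1772): insider roster and transactions
print(msft.insider_roster_holders)
print(msft.insider_transactions)
```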

README.md (15 changed lines)

@@ -111,6 +111,11 @@ msft.insider_transactions
 msft.insider_purchases
 msft.insider_roster_holders
 
 # show recommendations
 msft.recommendations
 msft.recommendations_summary
 msft.upgrades_downgrades
 
 # Show future and historic earnings dates, returns at most next 4 quarters and last 8 quarters by default.
 # Note: If more are needed use msft.get_earnings_dates(limit=XX) with increased limit argument.
 msft.earnings_dates
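The `limit` argument mentioned in the note above is the only knob; a short sketch (the symbol and limit value are illustrative):

```python
import yfinance as yf

msft = yf.Ticker("MSFT")

# Default: at most the next 4 quarters plus the last 8 quarters
recent = msft.earnings_dates

# Raise the limit to fetch a deeper history
deeper = msft.get_earnings_dates(limit=20)
print(len(recent), len(deeper))
```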
@@ -240,11 +245,13 @@ yf.pdr_override() # <== that's all it takes :-)
 
 data = pdr.get_data_yahoo("SPY", start="2017-01-01", end="2017-04-30")
 ```
 
-### Timezone cache store
+### Persistent cache store
 
 To reduce requests to Yahoo, yfinance stores some data locally: timezones to localize dates, and the cookie. Cache location is:
 - Windows = C:/Users/\<USER\>/AppData/Local/py-yfinance
 - Linux = /home/\<USER\>/.cache/py-yfinance
 - MacOS = /Users/\<USER\>/Library/Caches/py-yfinance
 
 When fetching price data, all dates are localized to the stock exchange's timezone.
 But timezone retrieval is relatively slow, so yfinance attempts to cache them
 in your user cache folder.
 You can direct the cache to use a different location with `set_tz_cache_location()`:
 ```python
 import yfinance as yf
 yf.set_tz_cache_location("custom/cache/location")
 ```

@@ -1,5 +1,5 @@
 {% set name = "yfinance" %}
-{% set version = "0.2.33" %}
+{% set version = "0.2.35" %}
 
 package:
   name: "{{ name|lower }}"

tests/prices.py (103 changed lines)

@@ -43,6 +43,18 @@ class TestPriceHistory(unittest.TestCase):
 
         df_tkrs = df.columns.levels[1]
         self.assertEqual(sorted(tkrs), sorted(df_tkrs))
 
+    def test_download_with_invalid_ticker(self):
+        # Checks if using an invalid symbol gives the same output as not using an invalid symbol in combination with a valid symbol (AAPL)
+        # Checks to make sure that invalid symbol handling for the date column is the same as the base case (no invalid symbols)
+
+        invalid_tkrs = ["AAPL", "ATVI"]  # AAPL exists and ATVI does not exist
+        valid_tkrs = ["AAPL", "INTC"]  # AAPL and INTC both exist
+
+        data_invalid_sym = yf.download(invalid_tkrs, start='2023-11-16', end='2023-11-17')
+        data_valid_sym = yf.download(valid_tkrs, start='2023-11-16', end='2023-11-17')
+
+        self.assertEqual(data_invalid_sym['Close']['AAPL']['2023-11-16'], data_valid_sym['Close']['AAPL']['2023-11-16'])
+
     def test_duplicatingHourly(self):
         tkrs = ["IMP.JO", "BHG.JO", "SSW.JO", "BP.L", "INTC"]
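The behaviour this new test pins down is fix #1779: an invalid symbol mixed into the list no longer corrupts the result's DatetimeIndex. A minimal sketch (ATVI is the delisted symbol the test itself uses):

```python
import yfinance as yf

data = yf.download(["AAPL", "ATVI"], start="2023-11-16", end="2023-11-17")
print(type(data.index))       # pandas DatetimeIndex, despite the invalid symbol
print(data["Close"]["AAPL"])  # data for the valid symbol is unaffected
```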
@@ -387,24 +399,20 @@ class TestPriceHistory(unittest.TestCase):
             raise
 
     def test_prune_post_intraday_us(self):
-        # Half-day before USA Thanksgiving. Yahoo normally
+        # Half-day at USA Thanksgiving. Yahoo normally
         # returns an interval starting when regular trading closes,
         # even if prepost=False.
 
         # Setup
         tkr = "AMZN"
         interval = "1h"
         interval_td = _dt.timedelta(hours=1)
         time_open = _dt.time(9, 30)
         time_close = _dt.time(16)
-        special_day = _dt.date(2022, 11, 25)
+        special_day = _dt.date(2023, 11, 24)
         time_early_close = _dt.time(13)
         dat = yf.Ticker(tkr, session=self.session)
 
         # Run
         start_d = special_day - _dt.timedelta(days=7)
         end_d = special_day + _dt.timedelta(days=7)
-        df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
+        df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
         tg_last_dt = df.loc[str(special_day)].index[-1]
         self.assertTrue(tg_last_dt.time() < time_early_close)
 
@@ -413,87 +421,22 @@ class TestPriceHistory(unittest.TestCase):
         end_d = _dt.date(special_day.year+1, 1, 1)
         df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
         last_dts = _pd.Series(df.index).groupby(df.index.date).last()
         f_early_close = (last_dts+interval_td).dt.time < time_close
         early_close_dates = last_dts.index[f_early_close].values
         self.assertEqual(len(early_close_dates), 1)
         self.assertEqual(early_close_dates[0], special_day)
 
         first_dts = _pd.Series(df.index).groupby(df.index.date).first()
         f_late_open = first_dts.dt.time > time_open
         late_open_dates = first_dts.index[f_late_open]
         self.assertEqual(len(late_open_dates), 0)
 
     def test_prune_post_intraday_omx(self):
-        # Half-day before Sweden Christmas. Yahoo normally
-        # returns an interval starting when regular trading closes,
-        # even if prepost=False.
+        # If prepost=False, test that yfinance is removing prepost intervals.
 
         # Setup
         tkr = "AEC.ST"
         interval = "1h"
         interval_td = _dt.timedelta(hours=1)
         time_open = _dt.time(9)
         time_close = _dt.time(17, 30)
         special_day = _dt.date(2022, 12, 23)
         time_early_close = _dt.time(13, 2)
         dat = yf.Ticker(tkr, session=self.session)
 
         # Half trading day Jan 5, Apr 14, May 25, Jun 23, Nov 4, Dec 23, Dec 30
         half_days = [_dt.date(special_day.year, x[0], x[1]) for x in [(1, 5), (4, 14), (5, 25), (6, 23), (11, 4), (12, 23), (12, 30)]]
 
         # Yahoo has incorrectly classified afternoon of 2022-04-13 as post-market.
         # Nothing yfinance can do because Yahoo doesn't return data with prepost=False.
         # But need to handle in this test.
         expected_incorrect_half_days = [_dt.date(2022, 4, 13)]
         half_days = sorted(half_days+expected_incorrect_half_days)
 
         # Run
         start_d = special_day - _dt.timedelta(days=7)
         end_d = special_day + _dt.timedelta(days=7)
         df = dat.history(start=start_d, end=end_d, interval=interval, prepost=False, keepna=True)
         tg_last_dt = df.loc[str(special_day)].index[-1]
         self.assertTrue(tg_last_dt.time() < time_early_close)
 
         # Test no other afternoons (or mornings) were pruned
         start_d = _dt.date(special_day.year, 1, 1)
         end_d = _dt.date(special_day.year+1, 1, 1)
         df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
         last_dts = _pd.Series(df.index).groupby(df.index.date).last()
         f_early_close = (last_dts+interval_td).dt.time < time_close
         early_close_dates = last_dts.index[f_early_close].values
         unexpected_early_close_dates = [d for d in early_close_dates if d not in half_days]
         self.assertEqual(len(unexpected_early_close_dates), 0)
         self.assertEqual(len(early_close_dates), len(half_days))
         self.assertTrue(_np.equal(early_close_dates, half_days).all())
 
         first_dts = _pd.Series(df.index).groupby(df.index.date).first()
         f_late_open = first_dts.dt.time > time_open
         late_open_dates = first_dts.index[f_late_open]
         self.assertEqual(len(late_open_dates), 0)
         dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
         self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())
 
     def test_prune_post_intraday_asx(self):
         # Setup
         tkr = "BHP.AX"
         interval_td = _dt.timedelta(hours=1)
         time_open = _dt.time(10)
         time_close = _dt.time(16, 12)
-        # No early closes in 2022
+        # No early closes in 2023
         dat = yf.Ticker(tkr, session=self.session)
 
-        # Test no afternoons (or mornings) were pruned
-        start_d = _dt.date(2022, 1, 1)
-        end_d = _dt.date(2022+1, 1, 1)
+        # Test no other afternoons (or mornings) were pruned
+        start_d = _dt.date(2023, 1, 1)
+        end_d = _dt.date(2023+1, 1, 1)
         df = dat.history(start=start_d, end=end_d, interval="1h", prepost=False, keepna=True)
         last_dts = _pd.Series(df.index).groupby(df.index.date).last()
         f_early_close = (last_dts+interval_td).dt.time < time_close
         early_close_dates = last_dts.index[f_early_close].values
         self.assertEqual(len(early_close_dates), 0)
 
         first_dts = _pd.Series(df.index).groupby(df.index.date).first()
         f_late_open = first_dts.dt.time > time_open
         late_open_dates = first_dts.index[f_late_open]
         self.assertEqual(len(late_open_dates), 0)
         dfd = dat.history(start=start_d, end=end_d, interval='1d', prepost=False, keepna=True)
         self.assertTrue(_np.equal(dfd.index.date, _pd.to_datetime(last_dts.index).date).all())
 
     def test_weekly_2rows_fix(self):
         tkr = "AMZN"
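All three prune tests lean on one pandas idiom: group the intraday DatetimeIndex by calendar date, take each day's first/last timestamp, and flag days whose last interval ends before the normal close. A self-contained sketch with synthetic timestamps (16:00 assumed as the normal close):

```python
import datetime as dt
import pandas as pd

# Two synthetic trading days: one full day, one that stops trading at 13:00
idx = pd.DatetimeIndex(
    [f"2023-11-22 {h}:30" for h in range(9, 16)] +  # 09:30 .. 15:30
    [f"2023-11-24 {h}:30" for h in range(9, 13)]    # 09:30 .. 12:30
)
interval_td = dt.timedelta(hours=1)

last_dts = pd.Series(idx).groupby(idx.date).last()
# A day closed early if its last interval ends before the normal close
f_early_close = (last_dts + interval_td).dt.time < dt.time(16)
print(last_dts.index[f_early_close])  # [datetime.date(2023, 11, 24)]
```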

tests/ticker.py (154 changed lines)

@@ -17,7 +17,7 @@ from yfinance.exceptions import YFNotImplementedError
 
 import unittest
 import requests_cache
-from typing import Union, Any
+from typing import Union, Any, get_args, _GenericAlias
 from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
 
 ticker_attributes = (
@@ -31,11 +31,10 @@ ticker_attributes = (
     ("actions", pd.DataFrame),
     ("shares", pd.DataFrame),
     ("info", dict),
-    ("calendar", pd.DataFrame),
+    ("calendar", dict),
     ("recommendations", Union[pd.DataFrame, dict]),
     ("recommendations_summary", Union[pd.DataFrame, dict]),
     ("upgrades_downgrades", Union[pd.DataFrame, dict]),
     ("recommendations_history", Union[pd.DataFrame, dict]),
     ("earnings", pd.DataFrame),
     ("quarterly_earnings", pd.DataFrame),
     ("quarterly_cashflow", pd.DataFrame),
@@ -58,7 +57,12 @@ def assert_attribute_type(testClass: unittest.TestCase, instance, attribute_name
     try:
         attribute = getattr(instance, attribute_name)
         if attribute is not None and expected_type is not Any:
-            testClass.assertEqual(type(attribute), expected_type)
+            err_msg = f'{attribute_name} type is {type(attribute)} not {expected_type}'
+            if isinstance(expected_type, _GenericAlias) and expected_type.__origin__ is Union:
+                allowed_types = get_args(expected_type)
+                testClass.assertTrue(isinstance(attribute, allowed_types), err_msg)
+            else:
+                testClass.assertEqual(type(attribute), expected_type, err_msg)
     except Exception:
         testClass.assertRaises(
             YFNotImplementedError, lambda: getattr(instance, attribute_name)
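The branch above exists because `isinstance()` rejects a bare `Union[...]`; the members have to be unpacked with `get_args()` first. The same technique in isolation:

```python
from typing import Union, get_args, _GenericAlias

import pandas as pd

def matches(value, expected) -> bool:
    # Union[...] is a typing alias, not a class, so unpack its member
    # types with get_args() before calling isinstance()
    if isinstance(expected, _GenericAlias) and expected.__origin__ is Union:
        return isinstance(value, get_args(expected))
    return type(value) is expected

print(matches(pd.DataFrame(), Union[pd.DataFrame, dict]))  # True
print(matches({}, Union[pd.DataFrame, dict]))              # True
print(matches(3, Union[pd.DataFrame, dict]))               # False
```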
@@ -136,8 +140,8 @@ class TestTicker(unittest.TestCase):
         tkr = "IBM"
         dat = yf.Ticker(tkr, session=self.session, proxy=self.proxy)
 
-        dat._fetch_ticker_tz(timeout=5)
-        dat._get_ticker_tz(timeout=5)
+        dat._fetch_ticker_tz(proxy=None, timeout=5)
+        dat._get_ticker_tz(proxy=None, timeout=5)
         dat.history(period="1wk")
 
         for attribute_name, attribute_type in ticker_attributes:
@@ -654,63 +658,6 @@ class TestTickerMiscFinancials(unittest.TestCase):
     def test_bad_freq_value_raises_exception(self):
         self.assertRaises(ValueError, lambda: self.ticker.get_cashflow(freq="badarg"))
 
-    # Below will fail because not ported to Yahoo API
-
-    # def test_sustainability(self):
-    #     data = self.ticker.sustainability
-    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-    #     self.assertFalse(data.empty, "data is empty")
-
-    #     data_cached = self.ticker.sustainability
-    #     self.assertIs(data, data_cached, "data not cached")
-
-    def test_recommendations(self):
-        data = self.ticker.recommendations
-        data_summary = self.ticker.recommendations_summary
-        self.assertTrue(data.equals(data_summary))
-        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-        self.assertFalse(data.empty, "data is empty")
-
-        data_cached = self.ticker.recommendations
-        self.assertIs(data, data_cached, "data not cached")
-
-    # def test_recommendations_summary(self):  # currently alias for recommendations
-    #     data = self.ticker.recommendations_summary
-    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-    #     self.assertFalse(data.empty, "data is empty")
-
-    #     data_cached = self.ticker.recommendations_summary
-    #     self.assertIs(data, data_cached, "data not cached")
-
-    def test_recommendations_history(self):  # alias for upgrades_downgrades
-        data = self.ticker.upgrades_downgrades
-        data_history = self.ticker.recommendations_history
-        self.assertTrue(data.equals(data_history))
-        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-        self.assertFalse(data.empty, "data is empty")
-        self.assertTrue(len(data.columns) == 4, "data has wrong number of columns")
-        self.assertEqual(data.columns.values.tolist(), ['Firm', 'ToGrade', 'FromGrade', 'Action'], "data has wrong column names")
-        self.assertIsInstance(data.index, pd.DatetimeIndex, "data has wrong index type")
-
-        data_cached = self.ticker.upgrades_downgrades
-        self.assertIs(data, data_cached, "data not cached")
-
-    # def test_analyst_price_target(self):
-    #     data = self.ticker.analyst_price_target
-    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-    #     self.assertFalse(data.empty, "data is empty")
-
-    #     data_cached = self.ticker.analyst_price_target
-    #     self.assertIs(data, data_cached, "data not cached")
-
-    # def test_revenue_forecasts(self):
-    #     data = self.ticker.revenue_forecasts
-    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
-    #     self.assertFalse(data.empty, "data is empty")
-
-    #     data_cached = self.ticker.revenue_forecasts
-    #     self.assertIs(data, data_cached, "data not cached")
-
     def test_calendar(self):
         data = self.ticker.calendar
         self.assertIsInstance(data, dict, "data has wrong type")
@@ -729,12 +676,89 @@
         data_cached = self.ticker.calendar
         self.assertIs(data, data_cached, "data not cached")
 
+    # Below will fail because not ported to Yahoo API
+
+    # def test_sustainability(self):
+    #     data = self.ticker.sustainability
+    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+    #     self.assertFalse(data.empty, "data is empty")
+
+    #     data_cached = self.ticker.sustainability
+    #     self.assertIs(data, data_cached, "data not cached")
+
+    # def test_shares(self):
+    #     data = self.ticker.shares
+    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+    #     self.assertFalse(data.empty, "data is empty")
+
+
+class TestTickerAnalysts(unittest.TestCase):
+    session = None
+
+    @classmethod
+    def setUpClass(cls):
+        cls.session = session_gbl
+
+    @classmethod
+    def tearDownClass(cls):
+        if cls.session is not None:
+            cls.session.close()
+
+    def setUp(self):
+        self.ticker = yf.Ticker("GOOGL", session=self.session)
+
+    def tearDown(self):
+        self.ticker = None
+
+    def test_recommendations(self):
+        data = self.ticker.recommendations
+        data_summary = self.ticker.recommendations_summary
+        self.assertTrue(data.equals(data_summary))
+        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+        self.assertFalse(data.empty, "data is empty")
+
+        data_cached = self.ticker.recommendations
+        self.assertIs(data, data_cached, "data not cached")
+
+    def test_recommendations_summary(self):  # currently alias for recommendations
+        data = self.ticker.recommendations_summary
+        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+        self.assertFalse(data.empty, "data is empty")
+
+        data_cached = self.ticker.recommendations_summary
+        self.assertIs(data, data_cached, "data not cached")
+
+    def test_upgrades_downgrades(self):
+        data = self.ticker.upgrades_downgrades
+        self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+        self.assertFalse(data.empty, "data is empty")
+        self.assertTrue(len(data.columns) == 4, "data has wrong number of columns")
+        self.assertEqual(data.columns.values.tolist(), ['Firm', 'ToGrade', 'FromGrade', 'Action'], "data has wrong column names")
+        self.assertIsInstance(data.index, pd.DatetimeIndex, "data has wrong index type")
+
+        data_cached = self.ticker.upgrades_downgrades
+        self.assertIs(data, data_cached, "data not cached")
+
+    # Below will fail because not ported to Yahoo API
+
+    # def test_analyst_price_target(self):
+    #     data = self.ticker.analyst_price_target
+    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+    #     self.assertFalse(data.empty, "data is empty")
+
+    #     data_cached = self.ticker.analyst_price_target
+    #     self.assertIs(data, data_cached, "data not cached")
+
+    # def test_revenue_forecasts(self):
+    #     data = self.ticker.revenue_forecasts
+    #     self.assertIsInstance(data, pd.DataFrame, "data has wrong type")
+    #     self.assertFalse(data.empty, "data is empty")
+
+    #     data_cached = self.ticker.revenue_forecasts
+    #     self.assertIs(data, data_cached, "data not cached")
+
+
 class TestTickerInfo(unittest.TestCase):
     session = None
@@ -777,11 +801,11 @@ class TestTickerInfo(unittest.TestCase):
 
         # We don't expect this one to have a trailing PEG ratio
         data1 = self.tickers[0].info
-        self.assertEqual(data1['trailingPegRatio'], None)
+        self.assertIsNone(data1['trailingPegRatio'])
 
         # This one should have a trailing PEG ratio
         data2 = self.tickers[2].info
-        self.assertEqual(data2['trailingPegRatio'], 1.2713)
+        self.assertIsInstance(data2['trailingPegRatio'], float)
         pass
 
     # def test_fast_info_matches_info(self):

@@ -86,7 +86,6 @@ class TickerBase:
             start=None, end=None, prepost=False, actions=True,
             auto_adjust=True, back_adjust=False, repair=False, keepna=False,
             proxy=None, rounding=False, timeout=10,
-            debug=None, # deprecated
             raise_errors=False) -> pd.DataFrame:
         """
         :Parameters:
@@ -126,23 +125,12 @@
             If not None stops waiting for a response after given number of
             seconds. (Can also be a fraction of a second e.g. 0.01)
             Default is 10 seconds.
-        debug: bool
-            If passed as False, will suppress message printing to console.
-            DEPRECATED, will be removed in future version
         raise_errors: bool
             If True, then raise errors as Exceptions instead of logging.
         """
         logger = utils.get_yf_logger()
         proxy = proxy or self.proxy
 
-        if debug is not None:
-            if debug:
-                utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead: logging.getLogger('yfinance').setLevel(logging.ERROR)")
-                logger.setLevel(logging.ERROR)
-            else:
-                utils.print_once(f"yfinance: Ticker.history(debug={debug}) argument is deprecated and will be removed in future version. Do this instead to suppress error messages: logging.getLogger('yfinance').setLevel(logging.CRITICAL)")
-                logger.setLevel(logging.CRITICAL)
-
         start_user = start
         end_user = end
         if start or period is None or period.lower() == "max":
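With the deprecated `debug` argument removed, verbosity is controlled entirely through the standard `logging` module, exactly as the old deprecation messages advised:

```python
import logging
import yfinance as yf

# Replacement for the removed Ticker.history(debug=...) argument:
logging.getLogger('yfinance').setLevel(logging.ERROR)      # what debug=True used to do
# logging.getLogger('yfinance').setLevel(logging.CRITICAL) # what debug=False used to do

df = yf.Ticker("MSFT").history(period="1wk")
```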
@@ -395,9 +383,6 @@
 
         df = df[~df.index.duplicated(keep='first')]  # must do before repair
 
-        if isinstance(repair, str) and repair=='silent':
-            utils.log_once(logging.WARNING, "yfinance: Ticker.history(repair='silent') value is deprecated and will be removed in future version. Repair now silent by default, use logging module to increase verbosity.")
-            repair = True
         if repair:
             # Do this before auto/back adjust
             logger.debug(f'{self.ticker}: checking OHLC for repairs ...')

@@ -222,7 +222,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
         _realign_dfs()
         data = _pd.concat(shared._DFS.values(), axis=1, sort=True,
                           keys=shared._DFS.keys())
 
         data.index = _pd.to_datetime(data.index)
         # switch names back to isins if applicable
         data.rename(columns=shared._ISINS, inplace=True)
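Context for the `concat` above: each per-ticker frame is joined side by side, and `keys=` lifts the ticker symbols into an outer column level, while converting the index with `to_datetime` keeps it a proper DatetimeIndex, which appears to be the point of fix #1779. A pure-pandas sketch; the prices are made up for illustration:

```python
import pandas as pd

# Illustrative stand-ins for the per-ticker frames in shared._DFS
dfs = {
    "AAPL": pd.DataFrame({"Close": [189.71]}, index=["2023-11-16"]),
    "INTC": pd.DataFrame({"Close": [44.03]}, index=["2023-11-16"]),
}
data = pd.concat(dfs.values(), axis=1, sort=True, keys=dfs.keys())
data.index = pd.to_datetime(data.index)  # guarantee a DatetimeIndex
print(data["AAPL"]["Close"])             # columns are (ticker, field) pairs
```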

@@ -1,7 +1,9 @@
 # from io import StringIO
 
 import pandas as pd
+import requests
 
+from yfinance import utils
 from yfinance.data import YfData
 from yfinance.const import _BASE_URL_
 from yfinance.exceptions import YFinanceDataException
@@ -76,7 +78,21 @@
         return result
 
     def _fetch_and_parse(self):
-        result = self._fetch(self.proxy)
+        try:
+            result = self._fetch(self.proxy)
+        except requests.exceptions.HTTPError as e:
+            utils.get_yf_logger().error(str(e))
+
+            self._major = pd.DataFrame()
+            self._major_direct_holders = pd.DataFrame()
+            self._institutional = pd.DataFrame()
+            self._mutualfund = pd.DataFrame()
+            self._insider_transactions = pd.DataFrame()
+            self._insider_purchases = pd.DataFrame()
+            self._insider_roster = pd.DataFrame()
+
+            return
+
         try:
             data = result["quoteSummary"]["result"][0]
             # parse "institutionOwnership", "fundOwnership", "majorDirectHolders", "majorHoldersBreakdown", "insiderTransactions", "insiderHolders", "netSharePurchaseActivity"
@@ -227,4 +243,4 @@
         ).convert_dtypes()
         self._insider_purchases = df
 
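The pattern in `_fetch_and_parse` above: catch the HTTP error at the fetch boundary, log it, and fall back to empty DataFrames so attribute access never raises. A standalone sketch of that idiom; the URL and helper name are illustrative, not yfinance API:

```python
import pandas as pd
import requests

def fetch_json_or_none(url: str):
    try:
        resp = requests.get(url, timeout=10)
        resp.raise_for_status()  # raises requests.exceptions.HTTPError on 4xx/5xx
        return resp.json()
    except requests.exceptions.HTTPError as e:
        print(f"fetch failed: {e}")  # stand-in for utils.get_yf_logger().error(...)
        return None

result = fetch_json_or_none("https://httpbin.org/status/404")
# Degrade to empty containers so downstream .empty checks still work
insider_transactions = pd.DataFrame() if result is None else pd.DataFrame(result)
print(insider_transactions.empty)  # True when the fetch failed
```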

@@ -6,6 +6,7 @@ from collections.abc import MutableMapping
 
 import numpy as _np
 import pandas as pd
+import requests
 
 from yfinance import utils
 from yfinance.data import YfData
@@ -585,28 +586,34 @@
     def recommendations(self) -> pd.DataFrame:
         if self._recommendations is None:
             result = self._fetch(self.proxy, modules=['recommendationTrend'])
-            try:
-                data = result["quoteSummary"]["result"][0]["recommendationTrend"]["trend"]
-            except (KeyError, IndexError):
-                raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
-            self._recommendations = pd.DataFrame(data)
+            if result is None:
+                self._recommendations = pd.DataFrame()
+            else:
+                try:
+                    data = result["quoteSummary"]["result"][0]["recommendationTrend"]["trend"]
+                except (KeyError, IndexError):
+                    raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
+                self._recommendations = pd.DataFrame(data)
         return self._recommendations
 
     @property
     def upgrades_downgrades(self) -> pd.DataFrame:
         if self._upgrades_downgrades is None:
             result = self._fetch(self.proxy, modules=['upgradeDowngradeHistory'])
-            try:
-                data = result["quoteSummary"]["result"][0]["upgradeDowngradeHistory"]["history"]
-                if len(data) == 0:
-                    raise YFinanceDataException(f"No upgrade/downgrade history found for {self._symbol}")
-                df = pd.DataFrame(data)
-                df.rename(columns={"epochGradeDate": "GradeDate", 'firm': 'Firm', 'toGrade': 'ToGrade', 'fromGrade': 'FromGrade', 'action': 'Action'}, inplace=True)
-                df.set_index('GradeDate', inplace=True)
-                df.index = pd.to_datetime(df.index, unit='s')
-                self._upgrades_downgrades = df
-            except (KeyError, IndexError):
-                raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
+            if result is None:
+                self._upgrades_downgrades = pd.DataFrame()
+            else:
+                try:
+                    data = result["quoteSummary"]["result"][0]["upgradeDowngradeHistory"]["history"]
+                    if len(data) == 0:
+                        raise YFinanceDataException(f"No upgrade/downgrade history found for {self._symbol}")
+                    df = pd.DataFrame(data)
+                    df.rename(columns={"epochGradeDate": "GradeDate", 'firm': 'Firm', 'toGrade': 'ToGrade', 'fromGrade': 'FromGrade', 'action': 'Action'}, inplace=True)
+                    df.set_index('GradeDate', inplace=True)
+                    df.index = pd.to_datetime(df.index, unit='s')
+                    self._upgrades_downgrades = df
+                except (KeyError, IndexError):
+                    raise YFinanceDataException(f"Failed to parse json response from Yahoo Finance: {result}")
         return self._upgrades_downgrades
 
     @property
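The net effect for callers: a failed quoteSummary request now surfaces as an empty DataFrame instead of an unhandled exception, so code can simply branch on `.empty`. A usage sketch (column names are the ones set by the rename above):

```python
import yfinance as yf

ud = yf.Ticker("MSFT").upgrades_downgrades
if ud.empty:
    # e.g. the underlying request failed with an HTTPError
    print("no upgrade/downgrade data available")
else:
    print(ud[['Firm', 'ToGrade', 'FromGrade', 'Action']].head())
```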
@@ -627,7 +634,11 @@
         if len(modules) == 0:
             raise YFinanceException("No valid modules provided, see available modules using `valid_modules`")
         params_dict = {"modules": modules, "corsDomain": "finance.yahoo.com", "formatted": "false", "symbol": self._symbol}
-        result = self._data.get_raw_json(_QUOTE_SUMMARY_URL_ + f"/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
+        try:
+            result = self._data.get_raw_json(_QUOTE_SUMMARY_URL_ + f"/{self._symbol}", user_agent_headers=self._data.user_agent_headers, params=params_dict, proxy=proxy)
+        except requests.exceptions.HTTPError as e:
+            utils.get_yf_logger().error(str(e))
+            return None
         return result
 
     def _fetch_info(self, proxy):
@@ -636,6 +647,10 @@
         self._already_fetched = True
         modules = ['financialData', 'quoteType', 'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
         result = self._fetch(proxy, modules=modules)
+        if result is None:
+            self._info = {}
+            return
+
         result["quoteSummary"]["result"][0]["symbol"] = self._symbol
         query1_info = next(
             (info for info in result.get("quoteSummary", {}).get("result", []) if info["symbol"] == self._symbol),
@@ -730,6 +745,10 @@
     def _fetch_calendar(self):
         # secFilings return too old data, so not requesting it for now
         result = self._fetch(self.proxy, modules=['calendarEvents'])
+        if result is None:
+            self._calendar = {}
+            return
+
         try:
             self._calendar = dict()
             _events = result["quoteSummary"]["result"][0]["calendarEvents"]
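Together with the `("calendar", dict)` change in tests/ticker.py above, this confirms `Ticker.calendar` is now a plain dict, and an HTTP failure yields `{}`. A quick usage sketch; the keys depend on what Yahoo returns:

```python
import yfinance as yf

cal = yf.Ticker("MSFT").calendar
print(type(cal))  # dict; empty {} if the calendarEvents fetch failed
for key, value in cal.items():
    print(f"{key}: {value}")
```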

@@ -176,10 +176,6 @@ class Ticker(TickerBase):
     def upgrades_downgrades(self):
         return self.get_upgrades_downgrades()
 
     @property
     def recommendations_history(self):
         return self.get_upgrades_downgrades()
 
     @property
     def earnings(self) -> _pd.DataFrame:
         return self.get_earnings()

@@ -1 +1 @@
-version = "0.2.33"
+version = "0.2.35"