Compare commits

..

14 commits

Author SHA1 Message Date
Chris Berkhout  3aa09084ed  Version 1.4.12.  2024-09-15 12:17:10 +00:00
Chris Berkhout  ab507b189c  Update live test.  2024-09-15 12:16:36 +00:00
Chris Berkhout  53f39a26ef  More time correction.  2024-09-15 11:35:56 +00:00
Chris Berkhout  dffe6f8e89  Timezone handling tweak.  2024-09-15 13:15:18 +02:00
Chris Berkhout  c78154df3a  Add missing file.  2024-09-15 13:07:50 +02:00
Chris Berkhout  1164724ffb  Version 1.4.11.  2024-09-15 13:01:11 +02:00
Chris Berkhout  77b2776e55  yahoo: More graceful handling of responses with meta but no timestamps.  2024-09-15 12:59:39 +02:00
Chris Berkhout  ee8ca0573d  yahoo: add back null handling, improve timestamp handling. Thanks @arkn98!  2024-09-15 12:46:45 +02:00
Chris Berkhout  b6f4c17530  Skip coindesk live test.  2024-09-14 22:49:45 +02:00
Chris Berkhout  5e75759b0f  Version 1.4.10.  2024-09-14 22:24:46 +02:00
Chris Berkhout  59574e9156  Fix yahoo source.  2024-09-14 22:22:35 +02:00
Chris Berkhout  51e297b752  Update alphavantage source notes regarding API rate limit.  2024-08-03 17:23:53 +02:00
Chris Berkhout  b7d0d739ab  Version 1.4.9.  2024-08-03 17:19:36 +02:00
Chris Berkhout  e8dec0bf64  Update Alpha Vantage rate limit handling.  2024-08-03 17:15:17 +02:00
14 changed files with 621 additions and 324 deletions

View file

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pricehist"
-version = "1.4.8"
+version = "1.4.12"
 description = "Fetch and format historical price data"
 authors = ["Chris Berkhout <chris@chrisberkhout.com>"]
 license = "MIT"

View file

@@ -1 +1 @@
-__version__ = "1.4.8"
+__version__ = "1.4.12"

View file

@@ -56,10 +56,8 @@ class AlphaVantage(BaseSource):
             "Beware that digital currencies quoted in non-USD currencies may "
             "be converted from USD data at one recent exchange rate rather "
             "than using historical rates.\n"
-            "Alpha Vantage's standard API call frequency limits is 5 calls per "
-            "minute and 500 per day, so you may need to pause between successive "
-            "commands. Note that retrieving prices for one stock consumes two "
-            "API calls."
+            "Alpha Vantage's standard API rate limit is 25 requests per day. "
+            "Note that retrieving prices for one stock consumes two API calls."
         )

     def _stock_symbols_message(self):
@@ -337,8 +335,8 @@ class AlphaVantage(BaseSource):
     def _raise_for_generic_errors(self, data):
         if type(data) is dict:
-            if "Note" in data and "call frequency" in data["Note"]:
-                raise exceptions.RateLimit(data["Note"])
+            if "Information" in data and "daily rate limits" in data["Information"]:
+                raise exceptions.RateLimit(data["Information"])
             if (
                 "Information" in data
                 and "unlock" in data["Information"]

View file

@@ -1,4 +1,3 @@
-import csv
 import dataclasses
 import json
 import logging
@@ -71,63 +70,39 @@ class Yahoo(BaseSource):
                 series.base, series.quote, self, "Don't specify the quote currency."
             )

-        quote, history = self._data(series)
+        data = self._data(series)
+
+        quote = data["chart"]["result"][0]["meta"]["currency"]
+        offset = data["chart"]["result"][0]["meta"]["gmtoffset"]
+        timestamps = data["chart"]["result"][0]["timestamp"]
+        adjclose_data = data["chart"]["result"][0]["indicators"]["adjclose"][0]
+        rest_data = data["chart"]["result"][0]["indicators"]["quote"][0]
+        amounts = {**adjclose_data, **rest_data}

         prices = [
-            Price(row["date"], amount)
-            for row in history
-            if (amount := self._amount(row, series.type))
+            Price(date, amount)
+            for i in range(len(timestamps))
+            if (date := self._ts_to_date(timestamps[i] + offset)) <= series.end
+            if (amount := self._amount(amounts, series.type, i)) is not None
         ]

         return dataclasses.replace(series, quote=quote, prices=prices)

-    def _amount(self, row, type):
-        if type == "mid" and row["high"] != "null" and row["low"] != "null":
-            return sum([Decimal(row["high"]), Decimal(row["low"])]) / 2
-        elif row[type] != "null":
-            return Decimal(row[type])
+    def _ts_to_date(self, ts) -> str:
+        return datetime.fromtimestamp(ts, tz=timezone.utc).date().isoformat()
+
+    def _amount(self, amounts, type, i):
+        if type == "mid" and amounts["high"] != "null" and amounts["low"] != "null":
+            return sum([Decimal(amounts["high"][i]), Decimal(amounts["low"][i])]) / 2
+        elif amounts[type] != "null" and amounts[type][i] is not None:
+            return Decimal(amounts[type][i])
         else:
             return None

-    def _data(self, series) -> (dict, csv.DictReader):
-        base_url = "https://query1.finance.yahoo.com/v7/finance"
+    def _data(self, series) -> dict:
+        base_url = "https://query1.finance.yahoo.com/v8/finance/chart"
         headers = {"User-Agent": f"pricehist/{__version__}"}
+        url = f"{base_url}/{series.base}"
-
-        spark_url = f"{base_url}/spark"
-        spark_params = {
-            "symbols": series.base,
-            "range": "1d",
-            "interval": "1d",
-            "indicators": "close",
-            "includeTimestamps": "false",
-            "includePrePost": "false",
-        }
-
-        try:
-            spark_response = self.log_curl(
-                requests.get(spark_url, params=spark_params, headers=headers)
-            )
-        except Exception as e:
-            raise exceptions.RequestError(str(e)) from e
-
-        code = spark_response.status_code
-        text = spark_response.text
-        if code == 404 and "No data found for spark symbols" in text:
-            raise exceptions.InvalidPair(
-                series.base, series.quote, self, "Symbol not found."
-            )
-
-        try:
-            spark_response.raise_for_status()
-        except Exception as e:
-            raise exceptions.BadResponse(str(e)) from e
-
-        try:
-            spark = json.loads(spark_response.content)
-            quote = spark["spark"]["result"][0]["response"][0]["meta"]["currency"]
-        except Exception as e:
-            raise exceptions.ResponseParsingError(
-                "The spark data couldn't be parsed. "
-            ) from e

         start_ts = int(
             datetime.strptime(series.start, "%Y-%m-%d")
@@ -142,34 +117,35 @@ class Yahoo(BaseSource):
             24 * 60 * 60
         )  # some symbols require padding on the end timestamp

-        history_url = f"{base_url}/download/{series.base}"
-        history_params = {
+        params = {
+            "symbol": series.base,
             "period1": start_ts,
             "period2": end_ts,
             "interval": "1d",
-            "events": "history",
+            "events": "capitalGain%7Cdiv%7Csplit",
             "includeAdjustedClose": "true",
+            "formatted": "true",
+            "userYfid": "true",
+            "lang": "en-US",
+            "region": "US",
         }

         try:
-            history_response = self.log_curl(
-                requests.get(history_url, params=history_params, headers=headers)
-            )
+            response = self.log_curl(requests.get(url, params=params, headers=headers))
         except Exception as e:
             raise exceptions.RequestError(str(e)) from e

-        code = history_response.status_code
-        text = history_response.text
+        code = response.status_code
+        text = response.text
         if code == 404 and "No data found, symbol may be delisted" in text:
             raise exceptions.InvalidPair(
                 series.base, series.quote, self, "Symbol not found."
             )
-        if code == 400 and "Data doesn't exist" in text:
+        elif code == 400 and "Data doesn't exist" in text:
             raise exceptions.BadResponse(
                 "No data for the given interval. Try requesting a larger interval."
             )
         elif code == 404 and "Timestamp data missing" in text:
             raise exceptions.BadResponse(
                 "Data missing. The given interval may be for a gap in the data "
@@ -177,20 +153,21 @@ class Yahoo(BaseSource):
             )

         try:
-            history_response.raise_for_status()
+            response.raise_for_status()
         except Exception as e:
             raise exceptions.BadResponse(str(e)) from e

         try:
-            history_lines = history_response.content.decode("utf-8").splitlines()
-            history_lines[0] = history_lines[0].lower().replace(" ", "")
-            history = csv.DictReader(history_lines, delimiter=",")
+            data = json.loads(response.content)
         except Exception as e:
-            raise exceptions.ResponseParsingError(str(e)) from e
+            raise exceptions.ResponseParsingError(
+                "The data couldn't be parsed. "
+            ) from e

-        if history_lines[0] != "date,open,high,low,close,adjclose,volume":
-            raise exceptions.ResponseParsingError("Unexpected CSV format")
+        if "timestamp" not in data["chart"]["result"][0]:
+            raise exceptions.BadResponse(
+                "No data for the given interval. "
+                "There may be a problem with the symbol or the interval."
+            )

-        requested_history = [row for row in history if row["date"] <= series.end]
-
-        return (quote, requested_history)
+        return data
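Taken together, these hunks move the Yahoo! Finance source from the v7 CSV download endpoint (plus a separate spark request for the quote currency) to a single v8 chart request whose JSON response carries the currency, a gmtoffset, timestamps, and per-field price arrays. A rough standalone sketch of that request and parsing flow, using the same URL and the core query parameters shown in the diff; the period values are illustrative UTC timestamps, and the field names follow the fixture JSON later in this compare:

# Rough sketch outside of pricehist; assumes the requests library.
from datetime import datetime, timezone
from decimal import Decimal

import requests

symbol = "TSLA"
url = f"https://query1.finance.yahoo.com/v8/finance/chart/{symbol}"
params = {
    "period1": 1609718400,  # 2021-01-04 00:00:00 UTC (illustrative)
    "period2": 1610150400,  # 2021-01-09 00:00:00 UTC (end padded by one day)
    "interval": "1d",
    "events": "capitalGain%7Cdiv%7Csplit",  # pre-encoded value, as in the diff
    "includeAdjustedClose": "true",
}
headers = {"User-Agent": "pricehist example"}

result = requests.get(url, params=params, headers=headers).json()["chart"]["result"][0]
offset = result["meta"]["gmtoffset"]
closes = result["indicators"]["adjclose"][0]["adjclose"]

for ts, amount in zip(result["timestamp"], closes):
    if amount is None:  # null entries are skipped, as in the new _amount()
        continue
    # Adding gmtoffset before reading the timestamp as UTC yields the
    # exchange-local trading date.
    date = datetime.fromtimestamp(ts + offset, tz=timezone.utc).date().isoformat()
    print(date, Decimal(str(amount)))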

View file

@@ -75,10 +75,10 @@ name="Alpha Vantage physical currency"
 cmd="pricehist fetch alphavantage AUD/EUR -s 2021-01-11 -e 2021-01-14"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-11,AUD,EUR,0.63318,alphavantage,close
-2021-01-12,AUD,EUR,0.63664,alphavantage,close
-2021-01-13,AUD,EUR,0.63585,alphavantage,close
-2021-01-14,AUD,EUR,0.63960,alphavantage,close
+2021-01-11,AUD,EUR,0.63374,alphavantage,close
+2021-01-12,AUD,EUR,0.63684,alphavantage,close
+2021-01-13,AUD,EUR,0.63686,alphavantage,close
+2021-01-14,AUD,EUR,0.63984,alphavantage,close
 END
 run_test "$name" "$cmd" "$expected"
@@ -116,18 +116,17 @@ date,base,quote,amount,source,type
 2021-01-07,BTC,EUR,31208.49,coinbasepro,mid
 2021-01-08,BTC,EUR,32019,coinbasepro,mid
 END
-run_test "$name" "$cmd" "$expected"
+skip_test "$name" "$cmd" "$expected"

-name="CoinDesk Bitcoin Price Index"
-cmd="pricehist fetch coindesk BTC/EUR -s 2021-01-04 -e 2021-01-08"
+name="CoinDesk Bitcoin Price Index v1"
+cmd="pricehist fetch coindeskbpi BTC/USD -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
+WARNING This source is deprecated. Data stops at 2022-07-10.
 date,base,quote,amount,source,type
-2021-01-04,BTC,EUR,26135.4901,coindesk,close
-2021-01-05,BTC,EUR,27677.9141,coindesk,close
-2021-01-06,BTC,EUR,29871.4301,coindesk,close
-2021-01-07,BTC,EUR,32183.1594,coindesk,close
-2021-01-08,BTC,EUR,33238.5724,coindesk,close
+2021-01-04,BTC,USD,31431.6123,coindeskbpi,close
+2021-01-05,BTC,USD,34433.6065,coindeskbpi,close
+2021-01-06,BTC,USD,36275.7563,coindeskbpi,close
+2021-01-07,BTC,USD,39713.5079,coindeskbpi,close
+2021-01-08,BTC,USD,40519.4486,coindeskbpi,close
 END
 skip_test "$name" "$cmd" "$expected"
@@ -159,11 +158,11 @@ name="Yahoo! Finance"
 cmd="pricehist fetch yahoo TSLA -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,TSLA,USD,243.256668,yahoo,adjclose
-2021-01-05,TSLA,USD,245.036667,yahoo,adjclose
-2021-01-06,TSLA,USD,251.993332,yahoo,adjclose
-2021-01-07,TSLA,USD,272.013336,yahoo,adjclose
-2021-01-08,TSLA,USD,293.339996,yahoo,adjclose
+2021-01-04,TSLA,USD,243.2566680908203125,yahoo,adjclose
+2021-01-05,TSLA,USD,245.0366668701171875,yahoo,adjclose
+2021-01-06,TSLA,USD,251.9933319091796875,yahoo,adjclose
+2021-01-07,TSLA,USD,272.013336181640625,yahoo,adjclose
+2021-01-08,TSLA,USD,293.339996337890625,yahoo,adjclose
 END
 run_test "$name" "$cmd" "$expected"

View file

@@ -59,11 +59,11 @@ digital_url = re.compile(
 )

 rate_limit_json = (
-    '{ "Note": "'
-    "Thank you for using Alpha Vantage! Our standard API call frequency is 5 "
-    "calls per minute and 500 calls per day. Please visit "
-    "https://www.alphavantage.co/premium/ if you would like to target a higher "
-    "API call frequency."
+    '{ "Information": "'
+    "Thank you for using Alpha Vantage! Our standard API rate limit is 25 "
+    "requests per day. Please subscribe to any of the premium plans at "
+    "https://www.alphavantage.co/premium/ to instantly remove all daily rate "
+    "limits."
     '" }'
 )

View file

@@ -36,38 +36,28 @@ def requests_mock():
         yield mock


-spark_url = "https://query1.finance.yahoo.com/v7/finance/spark"
-
-
-def history_url(base):
-    return f"https://query1.finance.yahoo.com/v7/finance/download/{base}"
-
-
-@pytest.fixture
-def spark_ok(requests_mock):
-    json = (Path(os.path.splitext(__file__)[0]) / "tsla-spark.json").read_text()
-    requests_mock.add(responses.GET, spark_url, body=json, status=200)
-    yield requests_mock
+def url(base):
+    return f"https://query1.finance.yahoo.com/v8/finance/chart/{base}"


 @pytest.fixture
 def recent_ok(requests_mock):
-    json = (Path(os.path.splitext(__file__)[0]) / "tsla-recent.csv").read_text()
-    requests_mock.add(responses.GET, history_url("TSLA"), body=json, status=200)
+    json = (Path(os.path.splitext(__file__)[0]) / "tsla-recent.json").read_text()
+    requests_mock.add(responses.GET, url("TSLA"), body=json, status=200)
     yield requests_mock


 @pytest.fixture
 def long_ok(requests_mock):
-    json = (Path(os.path.splitext(__file__)[0]) / "ibm-long-partial.csv").read_text()
-    requests_mock.add(responses.GET, history_url("IBM"), body=json, status=200)
+    json = (Path(os.path.splitext(__file__)[0]) / "ibm-long-partial.json").read_text()
+    requests_mock.add(responses.GET, url("IBM"), body=json, status=200)
     yield requests_mock


 @pytest.fixture
-def date_with_nulls_ok(requests_mock):
-    json = (Path(os.path.splitext(__file__)[0]) / "ibm-date-with-nulls.csv").read_text()
-    requests_mock.add(responses.GET, history_url("IBM"), body=json, status=200)
+def with_null_ok(requests_mock):
+    json = (Path(os.path.splitext(__file__)[0]) / "inrx-with-null.json").read_text()
+    requests_mock.add(responses.GET, url("INR=X"), body=json, status=200)
     yield requests_mock
@@ -105,59 +95,57 @@ def test_symbols(src, caplog):
     assert any(["Find the symbol of interest on" in r.message for r in caplog.records])


-def test_fetch_known(src, type, spark_ok, recent_ok):
+def test_fetch_known(src, type, recent_ok):
     series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    spark_req = recent_ok.calls[0].request
-    hist_req = recent_ok.calls[1].request
-    assert spark_req.params["symbols"] == "TSLA"
-    assert hist_req.params["events"] == "history"
-    assert hist_req.params["includeAdjustedClose"] == "true"
+    req = recent_ok.calls[0].request
+    assert req.params["events"] == "capitalGain%7Cdiv%7Csplit"
+    assert req.params["includeAdjustedClose"] == "true"
     assert (series.base, series.quote) == ("TSLA", "USD")
     assert len(series.prices) == 5


-def test_fetch_requests_and_receives_correct_times(src, type, spark_ok, recent_ok):
+def test_fetch_requests_and_receives_correct_times(src, type, recent_ok):
     series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    hist_req = recent_ok.calls[1].request
-    assert hist_req.params["period1"] == str(timestamp("2021-01-04"))
-    assert hist_req.params["period2"] == str(timestamp("2021-01-09"))  # rounded up one
-    assert hist_req.params["interval"] == "1d"
-    assert series.prices[0] == Price("2021-01-04", Decimal("729.770020"))
-    assert series.prices[-1] == Price("2021-01-08", Decimal("880.020020"))
+    req = recent_ok.calls[0].request
+    assert req.params["period1"] == str(timestamp("2021-01-04"))
+    assert req.params["period2"] == str(timestamp("2021-01-09"))  # rounded up one
+    assert req.params["interval"] == "1d"
+    assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
+    assert series.prices[-1] == Price("2021-01-08", Decimal("293.339996337890625"))


-def test_fetch_ignores_any_extra_row(src, type, spark_ok, recent_ok):
+def test_fetch_ignores_any_extra_row(src, type, recent_ok):
     series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-07"))
-    assert series.prices[0] == Price("2021-01-04", Decimal("729.770020"))
-    assert series.prices[-1] == Price("2021-01-07", Decimal("816.039978"))
+    assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
+    assert series.prices[-1] == Price("2021-01-07", Decimal("272.013336181640625"))


-def test_fetch_requests_logged(src, type, spark_ok, recent_ok, caplog):
+def test_fetch_requests_logged(src, type, recent_ok, caplog):
     with caplog.at_level(logging.DEBUG):
         src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
     logged_requests = 0
     for r in caplog.records:
         if r.levelname == "DEBUG" and "curl " in r.message:
             logged_requests += 1
-    assert logged_requests == 2
+    assert logged_requests == 1


-def test_fetch_types_all_available(src, spark_ok, recent_ok):
+def test_fetch_types_all_available(src, recent_ok):
     adj = src.fetch(Series("TSLA", "", "adjclose", "2021-01-04", "2021-01-08"))
     opn = src.fetch(Series("TSLA", "", "open", "2021-01-04", "2021-01-08"))
     hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08"))
     low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08"))
     cls = src.fetch(Series("TSLA", "", "close", "2021-01-04", "2021-01-08"))
     mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08"))
-    assert adj.prices[0].amount == Decimal("729.770020")
-    assert opn.prices[0].amount == Decimal("719.460022")
-    assert hgh.prices[0].amount == Decimal("744.489990")
-    assert low.prices[0].amount == Decimal("717.190002")
-    assert cls.prices[0].amount == Decimal("729.770020")
-    assert mid.prices[0].amount == Decimal("730.839996")
+    assert adj.prices[0].amount == Decimal("243.2566680908203125")
+    assert opn.prices[0].amount == Decimal("239.82000732421875")
+    assert hgh.prices[0].amount == Decimal("248.163330078125")
+    assert low.prices[0].amount == Decimal("239.0633392333984375")
+    assert cls.prices[0].amount == Decimal("243.2566680908203125")
+    assert mid.prices[0].amount == Decimal("243.61333465576171875")


-def test_fetch_type_mid_is_mean_of_low_and_high(src, spark_ok, recent_ok):
+def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_ok):
     mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08")).prices
     hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08")).prices
     low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08")).prices
@@ -169,29 +157,29 @@ def test_fetch_type_mid_is_mean_of_low_and_high(src, spark_ok, recent_ok):
     )


-def test_fetch_from_before_start(src, type, spark_ok, long_ok):
+def test_fetch_from_before_start(src, type, long_ok):
     series = src.fetch(Series("IBM", "", type, "1900-01-01", "2021-01-08"))
-    assert series.prices[0] == Price("1962-01-02", Decimal("1.837710"))
-    assert series.prices[-1] == Price("2021-01-08", Decimal("125.433624"))
+    assert series.prices[0] == Price("1962-01-02", Decimal("1.5133211612701416015625"))
+    assert series.prices[-1] == Price("2021-01-08", Decimal("103.2923736572265625"))
     assert len(series.prices) > 9


-def test_fetch_skips_dates_with_nulls(src, type, spark_ok, date_with_nulls_ok):
-    series = src.fetch(Series("IBM", "", type, "2021-01-05", "2021-01-07"))
-    assert series.prices[0] == Price("2021-01-05", Decimal("123.101204"))
-    assert series.prices[1] == Price("2021-01-07", Decimal("125.882545"))
+def test_fetch_skips_dates_with_nulls(src, type, with_null_ok):
+    series = src.fetch(Series("INR=X", "", type, "2017-07-10", "2017-07-12"))
+    assert series.prices[0] == Price("2017-07-10", Decimal("64.61170196533203125"))
+    assert series.prices[1] == Price("2017-07-12", Decimal("64.52559661865234375"))
     assert len(series.prices) == 2


-def test_fetch_to_future(src, type, spark_ok, recent_ok):
+def test_fetch_to_future(src, type, recent_ok):
     series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2100-01-08"))
     assert len(series.prices) > 0


-def test_fetch_no_data_in_past(src, type, spark_ok, requests_mock):
+def test_fetch_no_data_in_past(src, type, requests_mock):
     requests_mock.add(
         responses.GET,
-        history_url("TSLA"),
+        url("TSLA"),
         status=400,
         body=(
             "400 Bad Request: Data doesn't exist for "
@@ -203,10 +191,10 @@ def test_fetch_no_data_in_past(src, type, spark_ok, requests_mock):
     assert "No data for the given interval" in str(e.value)


-def test_fetch_no_data_in_future(src, type, spark_ok, requests_mock):
+def test_fetch_no_data_in_future(src, type, requests_mock):
     requests_mock.add(
         responses.GET,
-        history_url("TSLA"),
+        url("TSLA"),
         status=400,
         body=(
             "400 Bad Request: Data doesn't exist for "
@@ -218,10 +206,10 @@ def test_fetch_no_data_in_future(src, type, spark_ok, requests_mock):
     assert "No data for the given interval" in str(e.value)


-def test_fetch_no_data_on_weekend(src, type, spark_ok, requests_mock):
+def test_fetch_no_data_on_weekend(src, type, requests_mock):
     requests_mock.add(
         responses.GET,
-        history_url("TSLA"),
+        url("TSLA"),
         status=404,
         body="404 Not Found: Timestamp data missing.",
     )
@@ -233,30 +221,7 @@ def test_fetch_no_data_on_weekend(src, type, spark_ok, requests_mock):
 def test_fetch_bad_sym(src, type, requests_mock):
     requests_mock.add(
         responses.GET,
-        spark_url,
-        status=404,
-        body="""{
-    "spark": {
-        "result": null,
-        "error": {
-            "code": "Not Found",
-            "description": "No data found for spark symbols"
-        }
-    }
-}""",
-    )
-    with pytest.raises(exceptions.InvalidPair) as e:
-        src.fetch(Series("NOTABASE", "", type, "2021-01-04", "2021-01-08"))
-    assert "Symbol not found" in str(e.value)
-
-
-def test_fetch_bad_sym_history(src, type, spark_ok, requests_mock):
-    # In practice the spark history requests should succeed or fail together.
-    # This extra test ensures that a failure of the the history part is handled
-    # correctly even if the spark part succeeds.
-    requests_mock.add(
-        responses.GET,
-        history_url("NOTABASE"),
+        url("NOTABASE"),
         status=404,
         body="404 Not Found: No data found, symbol may be delisted",
     )
@@ -271,61 +236,23 @@ def test_fetch_giving_quote(src, type):
     assert "quote currency" in str(e.value)


-def test_fetch_spark_network_issue(src, type, requests_mock):
+def test_fetch_network_issue(src, type, requests_mock):
     body = requests.exceptions.ConnectionError("Network issue")
-    requests_mock.add(responses.GET, spark_url, body=body)
+    requests_mock.add(responses.GET, url("TSLA"), body=body)
     with pytest.raises(exceptions.RequestError) as e:
         src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
     assert "Network issue" in str(e.value)


-def test_fetch_spark_bad_status(src, type, requests_mock):
-    requests_mock.add(responses.GET, spark_url, status=500, body="Some other reason")
+def test_fetch_bad_status(src, type, requests_mock):
+    requests_mock.add(responses.GET, url("TSLA"), status=500, body="Some other reason")
     with pytest.raises(exceptions.BadResponse) as e:
         src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
     assert "Internal Server Error" in str(e.value)


-def test_fetch_spark_parsing_error(src, type, requests_mock):
-    requests_mock.add(responses.GET, spark_url, body="NOT JSON")
-    with pytest.raises(exceptions.ResponseParsingError) as e:
-        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    assert "spark data couldn't be parsed" in str(e.value)
-
-
-def test_fetch_spark_unexpected_json(src, type, requests_mock):
-    requests_mock.add(responses.GET, spark_url, body='{"notdata": []}')
-    with pytest.raises(exceptions.ResponseParsingError) as e:
-        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    assert "spark data couldn't be parsed" in str(e.value)
-
-
-def test_fetch_history_network_issue(src, type, spark_ok, requests_mock):
-    body = requests.exceptions.ConnectionError("Network issue")
-    requests_mock.add(responses.GET, history_url("TSLA"), body=body)
-    with pytest.raises(exceptions.RequestError) as e:
-        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    assert "Network issue" in str(e.value)
-
-
-def test_fetch_history_bad_status(src, type, spark_ok, requests_mock):
-    requests_mock.add(
-        responses.GET, history_url("TSLA"), status=500, body="Some other reason"
-    )
-    with pytest.raises(exceptions.BadResponse) as e:
-        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    assert "Internal Server Error" in str(e.value)
-
-
-def test_fetch_history_parsing_error(src, type, spark_ok, requests_mock):
-    requests_mock.add(responses.GET, history_url("TSLA"), body="")
+def test_fetch_parsing_error(src, type, requests_mock):
+    requests_mock.add(responses.GET, url("TSLA"), body="")
     with pytest.raises(exceptions.ResponseParsingError) as e:
         src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
     assert "error occurred while parsing data from the source" in str(e.value)
-
-
-def test_fetch_history_unexpected_csv_format(src, type, spark_ok, requests_mock):
-    requests_mock.add(responses.GET, history_url("TSLA"), body="BAD HEADER\nBAD DATA")
-    with pytest.raises(exceptions.ResponseParsingError) as e:
-        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
-    assert "Unexpected CSV format" in str(e.value)

View file

@@ -1,4 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
2021-01-05,125.010002,126.680000,124.610001,126.139999,123.101204,6114600
2021-01-06,null,null,null,null,null,null
2021-01-07,130.039993,130.460007,128.259995,128.990005,125.882545,4507400

View file

@@ -1,11 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
1962-01-02,7.713333,7.713333,7.626667,7.626667,1.837710,390000
1962-01-03,7.626667,7.693333,7.626667,7.693333,1.853774,292500
1962-01-04,7.693333,7.693333,7.613333,7.616667,1.835299,262500
1962-01-05,7.606667,7.606667,7.453333,7.466667,1.799155,367500
1962-01-08,7.460000,7.460000,7.266667,7.326667,1.765422,547500
2021-01-04,125.849998,125.919998,123.040001,123.940002,120.954201,5179200
2021-01-05,125.010002,126.680000,124.610001,126.139999,123.101204,6114600
2021-01-06,126.900002,131.880005,126.720001,129.289993,126.175316,7956700
2021-01-07,130.039993,130.460007,128.259995,128.990005,125.882545,4507400
2021-01-08,128.570007,129.320007,126.980003,128.529999,125.433624,4676200

View file

@@ -0,0 +1,249 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "USD",
"symbol": "IBM",
"exchangeName": "NYQ",
"fullExchangeName": "NYSE",
"instrumentType": "EQUITY",
"firstTradeDate": -252322200,
"regularMarketTime": 1726257602,
"hasPrePostMarketData": true,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 214.79,
"fiftyTwoWeekHigh": 216.08,
"fiftyTwoWeekLow": 212.13,
"regularMarketDayHigh": 216.08,
"regularMarketDayLow": 212.13,
"regularMarketVolume": 4553547,
"longName": "International Business Machines Corporation",
"shortName": "International Business Machines",
"chartPreviousClose": 7.291,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"end": 1726234200,
"start": 1726214400,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"end": 1726257600,
"start": 1726234200,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"end": 1726272000,
"start": 1726257600,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
-252322200,
-252235800,
-252149400,
-252063000,
-251803800,
1609770600,
1609857000,
1609943400,
1610029800,
1610116200
],
"events": {
"dividends": {
"-249298200": {
"amount": 0.000956,
"date": -249298200
},
"-241439400": {
"amount": 0.000956,
"date": -241439400
},
"-233577000": {
"amount": 0.000956,
"date": -233577000
},
"-225797400": {
"amount": 0.000956,
"date": -225797400
},
"-217848600": {
"amount": 0.001275,
"date": -217848600
},
"1573137000": {
"amount": 1.548757,
"date": 1573137000
},
"1581085800": {
"amount": 1.548757,
"date": 1581085800
},
"1588858200": {
"amount": 1.558317,
"date": 1588858200
},
"1596807000": {
"amount": 1.558317,
"date": 1596807000
},
"1604932200": {
"amount": 1.558317,
"date": 1604932200
}
},
"splits": {
"-177417000": {
"date": -177417000,
"numerator": 5.0,
"denominator": 4.0,
"splitRatio": "5:4"
},
"-114345000": {
"date": -114345000,
"numerator": 3.0,
"denominator": 2.0,
"splitRatio": "3:2"
},
"-53343000": {
"date": -53343000,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
},
"107530200": {
"date": 107530200,
"numerator": 5.0,
"denominator": 4.0,
"splitRatio": "5:4"
},
"297091800": {
"date": 297091800,
"numerator": 4.0,
"denominator": 1.0,
"splitRatio": "4:1"
},
"864826200": {
"date": 864826200,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
},
"927811800": {
"date": 927811800,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
}
}
},
"indicators": {
"quote": [
{
"close": [
7.2912678718566895,
7.3550028800964355,
7.281707763671875,
7.138305187225342,
7.00446081161499,
118.48948669433594,
120.59273529052734,
123.60420989990234,
123.31739807128906,
122.87763214111328
],
"low": [
7.2912678718566895,
7.2912678718566895,
7.2785210609436035,
7.125557899475098,
6.9471001625061035,
117.62906646728516,
119.13002014160156,
121.14722442626953,
122.61949920654297,
121.39579010009766
],
"open": [
7.374124050140381,
7.2912678718566895,
7.3550028800964355,
7.272148132324219,
7.131930828094482,
120.31549072265625,
119.5124282836914,
121.3193130493164,
124.32122039794922,
122.9158706665039
],
"high": [
7.374124050140381,
7.3550028800964355,
7.3550028800964355,
7.272148132324219,
7.131930828094482,
120.38240814208984,
121.1089859008789,
126.08030700683594,
124.7227554321289,
123.63288879394531
],
"volume": [
407940,
305955,
274575,
384405,
572685,
5417443,
6395872,
8322708,
4714740,
4891305
]
}
],
"adjclose": [
{
"adjclose": [
1.5133211612701416,
1.5265485048294067,
1.5113375186920166,
1.4815733432769775,
1.4537923336029053,
99.60364532470703,
101.37164306640625,
103.90313720703125,
103.66202545166016,
103.29237365722656
]
}
]
}
}
],
"error": null
}
}

View file

@@ -0,0 +1,119 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "INR",
"symbol": "INR=X",
"exchangeName": "CCY",
"fullExchangeName": "CCY",
"instrumentType": "CURRENCY",
"firstTradeDate": 1070236800,
"regularMarketTime": 1726284616,
"hasPrePostMarketData": false,
"gmtoffset": 3600,
"timezone": "BST",
"exchangeTimezoneName": "Europe/London",
"regularMarketPrice": 83.89,
"fiftyTwoWeekHigh": 83.89,
"fiftyTwoWeekLow": 83.89,
"regularMarketDayHigh": 83.89,
"regularMarketDayLow": 83.89,
"regularMarketVolume": 0,
"longName": "USD/INR",
"shortName": "USD/INR",
"chartPreviousClose": 64.6117,
"priceHint": 4,
"currentTradingPeriod": {
"pre": {
"timezone": "BST",
"start": 1726182000,
"end": 1726182000,
"gmtoffset": 3600
},
"regular": {
"timezone": "BST",
"start": 1726182000,
"end": 1726268340,
"gmtoffset": 3600
},
"post": {
"timezone": "BST",
"start": 1726268340,
"end": 1726268340,
"gmtoffset": 3600
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1499641200,
1499727600,
1499814000,
1499900400
],
"indicators": {
"quote": [
{
"open": [
64.6155014038086,
null,
64.55549621582031,
64.46800231933594
],
"volume": [
0,
null,
0,
0
],
"low": [
64.41000366210938,
null,
64.3499984741211,
64.33999633789062
],
"close": [
64.61170196533203,
null,
64.52559661865234,
64.36499786376953
],
"high": [
64.6155014038086,
null,
64.56999969482422,
64.48419952392578
]
}
],
"adjclose": [
{
"adjclose": [
64.61170196533203,
null,
64.52559661865234,
64.36499786376953
]
}
]
}
}
],
"error": null
}
}

View file

@@ -1,6 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
2021-01-04,719.460022,744.489990,717.190002,729.770020,729.770020,48638200
2021-01-05,723.659973,740.840027,719.200012,735.109985,735.109985,32245200
2021-01-06,758.489990,774.000000,749.099976,755.979980,755.979980,44700000
2021-01-07,777.630005,816.989990,775.200012,816.039978,816.039978,51498900
2021-01-08,856.000000,884.489990,838.390015,880.020020,880.020020,75055500

View file

@@ -0,0 +1,126 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "USD",
"symbol": "TSLA",
"exchangeName": "NMS",
"fullExchangeName": "NasdaqGS",
"instrumentType": "EQUITY",
"firstTradeDate": 1277818200,
"regularMarketTime": 1726257600,
"hasPrePostMarketData": true,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 230.29,
"fiftyTwoWeekHigh": 232.664,
"fiftyTwoWeekLow": 226.32,
"regularMarketDayHigh": 232.664,
"regularMarketDayLow": 226.32,
"regularMarketVolume": 59096538,
"longName": "Tesla, Inc.",
"shortName": "Tesla, Inc.",
"chartPreviousClose": 235.223,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"start": 1726214400,
"end": 1726234200,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"start": 1726234200,
"end": 1726257600,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"start": 1726257600,
"end": 1726272000,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1609770600,
1609857000,
1609943400,
1610029800,
1610116200
],
"indicators": {
"quote": [
{
"open": [
239.82000732421875,
241.22000122070312,
252.8300018310547,
259.2099914550781,
285.3333435058594
],
"close": [
243.2566680908203,
245.0366668701172,
251.9933319091797,
272.0133361816406,
293.3399963378906
],
"high": [
248.163330078125,
246.94667053222656,
258.0,
272.3299865722656,
294.8299865722656
],
"low": [
239.06333923339844,
239.73333740234375,
249.6999969482422,
258.3999938964844,
279.46331787109375
],
"volume": [
145914600,
96735600,
134100000,
154496700,
225166500
]
}
],
"adjclose": [
{
"adjclose": [
243.2566680908203,
245.0366668701172,
251.9933319091797,
272.0133361816406,
293.3399963378906
]
}
]
}
}
],
"error": null
}
}

View file

@@ -1,77 +0,0 @@
{
"spark": {
"result": [
{
"symbol": "TSLA",
"response": [
{
"meta": {
"currency": "USD",
"symbol": "TSLA",
"exchangeName": "NMS",
"instrumentType": "EQUITY",
"firstTradeDate": 1277818200,
"regularMarketTime": 1626465603,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 644.22,
"chartPreviousClose": 650.6,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"start": 1626422400,
"end": 1626442200,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"start": 1626442200,
"end": 1626465600,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"start": 1626465600,
"end": 1626480000,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "1d",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1626442200,
1626465603
],
"indicators": {
"quote": [
{
"close": [
644.22,
644.22
]
}
]
}
}
]
}
],
"error": null
}
}