Compare commits
No commits in common. "master" and "1.4.10" have entirely different histories.
6 changed files with 13 additions and 154 deletions
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pricehist"
-version = "1.4.12"
+version = "1.4.10"
 description = "Fetch and format historical price data"
 authors = ["Chris Berkhout <chris@chrisberkhout.com>"]
 license = "MIT"
@@ -1 +1 @@
-__version__ = "1.4.12"
+__version__ = "1.4.10"
@@ -72,7 +72,6 @@ class Yahoo(BaseSource):
 
         data = self._data(series)
         quote = data["chart"]["result"][0]["meta"]["currency"]
-        offset = data["chart"]["result"][0]["meta"]["gmtoffset"]
 
         timestamps = data["chart"]["result"][0]["timestamp"]
         adjclose_data = data["chart"]["result"][0]["indicators"]["adjclose"][0]
@@ -80,21 +79,19 @@ class Yahoo(BaseSource):
         amounts = {**adjclose_data, **rest_data}
 
         prices = [
-            Price(date, amount)
+            Price(ts, amount)
             for i in range(len(timestamps))
-            if (date := self._ts_to_date(timestamps[i] + offset)) <= series.end
+            if (ts := datetime.fromtimestamp(timestamps[i]).strftime("%Y-%m-%d"))
+            <= series.end
            if (amount := self._amount(amounts, series.type, i)) is not None
         ]
 
         return dataclasses.replace(series, quote=quote, prices=prices)
 
-    def _ts_to_date(self, ts) -> str:
-        return datetime.fromtimestamp(ts, tz=timezone.utc).date().isoformat()
-
     def _amount(self, amounts, type, i):
         if type == "mid" and amounts["high"] != "null" and amounts["low"] != "null":
             return sum([Decimal(amounts["high"][i]), Decimal(amounts["low"][i])]) / 2
-        elif amounts[type] != "null" and amounts[type][i] is not None:
+        elif amounts[type] != "null":
             return Decimal(amounts[type][i])
         else:
             return None
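As an aside (a standalone sketch, not part of the diff): the hunk above is where the two versions diverge on date handling. On the master side, each bar's epoch timestamp is shifted by the gmtoffset from the chart metadata and read as a UTC date, so the result is the exchange's calendar date regardless of where pricehist runs; on the 1.4.10 side, datetime.fromtimestamp uses the host's local timezone, which can move a bar onto the wrong day. The sample timestamp and offset below are taken from the INR=X fixture in the last hunk of this compare.

from datetime import datetime, timezone

ts = 1499641200   # first timestamp in the INR=X fixture (2017-07-10 00:00 BST)
gmtoffset = 3600  # offset reported in the chart metadata, in seconds

# master: shift by the exchange offset, then read the date in UTC
master_date = datetime.fromtimestamp(ts + gmtoffset, tz=timezone.utc).date().isoformat()

# 1.4.10: interpret the timestamp in the host's local timezone
old_date = datetime.fromtimestamp(ts).strftime("%Y-%m-%d")

print(master_date, old_date)  # master_date is always "2017-07-10"; old_date depends on the host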
@@ -142,10 +139,11 @@ class Yahoo(BaseSource):
             raise exceptions.InvalidPair(
                 series.base, series.quote, self, "Symbol not found."
             )
-        elif code == 400 and "Data doesn't exist" in text:
+        if code == 400 and "Data doesn't exist" in text:
             raise exceptions.BadResponse(
                 "No data for the given interval. Try requesting a larger interval."
             )
+
         elif code == 404 and "Timestamp data missing" in text:
             raise exceptions.BadResponse(
                 "Data missing. The given interval may be for a gap in the data "
@@ -164,10 +162,4 @@ class Yahoo(BaseSource):
                 "The data couldn't be parsed. "
             ) from e
 
-        if "timestamp" not in data["chart"]["result"][0]:
-            raise exceptions.BadResponse(
-                "No data for the given interval. "
-                "There may be a problem with the symbol or the interval."
-            )
-
         return data
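Also worth noting: the hunk above removes, relative to master, a guard that runs after the response is parsed. On master, a chart result that carries metadata but no "timestamp" key is reported as a bad response instead of surfacing later as a KeyError. A rough self-contained sketch of that check, with a plain ValueError standing in for pricehist's exceptions.BadResponse:

def check_has_timestamps(data: dict) -> dict:
    # Mirrors the master-side guard: a result with metadata but no bars is "no data".
    if "timestamp" not in data["chart"]["result"][0]:
        raise ValueError(
            "No data for the given interval. "
            "There may be a problem with the symbol or the interval."
        )
    return data

empty = {"chart": {"result": [{"meta": {"currency": "INR"}}]}}
try:
    check_has_timestamps(empty)
except ValueError as err:
    print(err)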
@@ -75,10 +75,10 @@ name="Alpha Vantage physical currency"
 cmd="pricehist fetch alphavantage AUD/EUR -s 2021-01-11 -e 2021-01-14"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-11,AUD,EUR,0.63374,alphavantage,close
-2021-01-12,AUD,EUR,0.63684,alphavantage,close
-2021-01-13,AUD,EUR,0.63686,alphavantage,close
-2021-01-14,AUD,EUR,0.63984,alphavantage,close
+2021-01-11,AUD,EUR,0.63318,alphavantage,close
+2021-01-12,AUD,EUR,0.63664,alphavantage,close
+2021-01-13,AUD,EUR,0.63585,alphavantage,close
+2021-01-14,AUD,EUR,0.63960,alphavantage,close
 END
 run_test "$name" "$cmd" "$expected"
 
@@ -128,7 +128,7 @@ date,base,quote,amount,source,type
 2021-01-07,BTC,USD,39713.5079,coindeskbpi,close
 2021-01-08,BTC,USD,40519.4486,coindeskbpi,close
 END
-skip_test "$name" "$cmd" "$expected"
+run_test "$name" "$cmd" "$expected"
 
 name="CoinMarketCap"
 cmd="pricehist fetch coinmarketcap BTC/EUR -s 2021-01-04 -e 2021-01-08"
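The expected blocks in these live tests are plain CSV with a date,base,quote,amount,source,type header. A small sketch of reading a couple of the rows above with the standard library (values copied from the 1.4.10 side of the hunk):

import csv
import io

expected = """date,base,quote,amount,source,type
2021-01-11,AUD,EUR,0.63318,alphavantage,close
2021-01-12,AUD,EUR,0.63664,alphavantage,close
"""

rows = list(csv.DictReader(io.StringIO(expected)))
print(rows[0]["date"], rows[0]["amount"])  # 2021-01-11 0.63318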
@@ -54,13 +54,6 @@ def long_ok(requests_mock):
     yield requests_mock
 
 
-@pytest.fixture
-def with_null_ok(requests_mock):
-    json = (Path(os.path.splitext(__file__)[0]) / "inrx-with-null.json").read_text()
-    requests_mock.add(responses.GET, url("INR=X"), body=json, status=200)
-    yield requests_mock
-
-
 def test_normalizesymbol(src):
     assert src.normalizesymbol("tsla") == "TSLA"
 
@@ -164,13 +157,6 @@ def test_fetch_from_before_start(src, type, long_ok):
    assert len(series.prices) > 9
 
 
-def test_fetch_skips_dates_with_nulls(src, type, with_null_ok):
-    series = src.fetch(Series("INR=X", "", type, "2017-07-10", "2017-07-12"))
-    assert series.prices[0] == Price("2017-07-10", Decimal("64.61170196533203125"))
-    assert series.prices[1] == Price("2017-07-12", Decimal("64.52559661865234375"))
-    assert len(series.prices) == 2
-
-
 def test_fetch_to_future(src, type, recent_ok):
     series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2100-01-08"))
     assert len(series.prices) > 0
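For orientation: the test removed above is tied to the inrx-with-null.json fixture deleted in the final hunk below. The fixture has four daily bars; the 2017-07-11 bar is all nulls and the 2017-07-13 bar falls after the requested end date, so only two prices survive. A small self-contained sketch of that filtering (dates and closes copied from the fixture; the real code works on epoch timestamps and Decimal amounts):

# Why the removed test expects exactly two prices from the fixture.
closes = {
    "2017-07-10": "64.61170196533203",
    "2017-07-11": None,               # null bar in the fixture
    "2017-07-12": "64.52559661865234",
    "2017-07-13": "64.36499786376953",
}
end = "2017-07-12"
kept = [d for d, v in closes.items() if d <= end and v is not None]
print(kept)  # ['2017-07-10', '2017-07-12']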
@@ -1,119 +0,0 @@
-{
-  "chart": {
-    "result": [
-      {
-        "meta": {
-          "currency": "INR",
-          "symbol": "INR=X",
-          "exchangeName": "CCY",
-          "fullExchangeName": "CCY",
-          "instrumentType": "CURRENCY",
-          "firstTradeDate": 1070236800,
-          "regularMarketTime": 1726284616,
-          "hasPrePostMarketData": false,
-          "gmtoffset": 3600,
-          "timezone": "BST",
-          "exchangeTimezoneName": "Europe/London",
-          "regularMarketPrice": 83.89,
-          "fiftyTwoWeekHigh": 83.89,
-          "fiftyTwoWeekLow": 83.89,
-          "regularMarketDayHigh": 83.89,
-          "regularMarketDayLow": 83.89,
-          "regularMarketVolume": 0,
-          "longName": "USD/INR",
-          "shortName": "USD/INR",
-          "chartPreviousClose": 64.6117,
-          "priceHint": 4,
-          "currentTradingPeriod": {
-            "pre": {
-              "timezone": "BST",
-              "start": 1726182000,
-              "end": 1726182000,
-              "gmtoffset": 3600
-            },
-            "regular": {
-              "timezone": "BST",
-              "start": 1726182000,
-              "end": 1726268340,
-              "gmtoffset": 3600
-            },
-            "post": {
-              "timezone": "BST",
-              "start": 1726268340,
-              "end": 1726268340,
-              "gmtoffset": 3600
-            }
-          },
-          "dataGranularity": "1d",
-          "range": "",
-          "validRanges": [
-            "1d",
-            "5d",
-            "1mo",
-            "3mo",
-            "6mo",
-            "1y",
-            "2y",
-            "5y",
-            "10y",
-            "ytd",
-            "max"
-          ]
-        },
-        "timestamp": [
-          1499641200,
-          1499727600,
-          1499814000,
-          1499900400
-        ],
-        "indicators": {
-          "quote": [
-            {
-              "open": [
-                64.6155014038086,
-                null,
-                64.55549621582031,
-                64.46800231933594
-              ],
-              "volume": [
-                0,
-                null,
-                0,
-                0
-              ],
-              "low": [
-                64.41000366210938,
-                null,
-                64.3499984741211,
-                64.33999633789062
-              ],
-              "close": [
-                64.61170196533203,
-                null,
-                64.52559661865234,
-                64.36499786376953
-              ],
-              "high": [
-                64.6155014038086,
-                null,
-                64.56999969482422,
-                64.48419952392578
-              ]
-            }
-          ],
-          "adjclose": [
-            {
-              "adjclose": [
-                64.61170196533203,
-                null,
-                64.52559661865234,
-                64.36499786376953
-              ]
-            }
-          ]
-        }
-      }
-    ],
-    "error": null
-  }
-}