Compare commits

...

38 commits

Author SHA1 Message Date
Chris Berkhout
3aa09084ed Version 1.4.12. 2024-09-15 12:17:10 +00:00
Chris Berkhout
ab507b189c Update live test. 2024-09-15 12:16:36 +00:00
Chris Berkhout
53f39a26ef More time correction. 2024-09-15 11:35:56 +00:00
Chris Berkhout
dffe6f8e89 Timezone handling tweak. 2024-09-15 13:15:18 +02:00
Chris Berkhout
c78154df3a Add missing file. 2024-09-15 13:07:50 +02:00
Chris Berkhout
1164724ffb Version 1.4.11. 2024-09-15 13:01:11 +02:00
Chris Berkhout
77b2776e55 yahoo: More graceful handling of responses with meta but no timestamps. 2024-09-15 12:59:39 +02:00
Chris Berkhout
ee8ca0573d yahoo: add back null handling, improve timestamp handling.
Thanks @arkn98!
2024-09-15 12:46:45 +02:00
Chris Berkhout
b6f4c17530 Skip coindesk live test. 2024-09-14 22:49:45 +02:00
Chris Berkhout
5e75759b0f Version 1.4.10. 2024-09-14 22:24:46 +02:00
Chris Berkhout
59574e9156 Fix yahoo source. 2024-09-14 22:22:35 +02:00
Chris Berkhout
51e297b752 Update alphavantage source notes regarding API rate limit. 2024-08-03 17:23:53 +02:00
Chris Berkhout
b7d0d739ab Version 1.4.9. 2024-08-03 17:19:36 +02:00
Chris Berkhout
e8dec0bf64 Update Alpha Vantage rate limit handling. 2024-08-03 17:15:17 +02:00
Chris Berkhout
1e1003994c Version 1.4.8. 2024-08-03 13:04:25 +02:00
Chris Berkhout
4cfee667c3 Update coinmarketcap source notes. 2024-08-03 12:54:11 +02:00
Chris Berkhout
9eb6de4c44 live tests: reactivate coinmarketcap, update alphavantage physical for new data. 2024-08-03 12:40:28 +02:00
Chris Berkhout
9dd6121d4d Update coinmarketcap error handling and tests. 2024-08-03 12:33:48 +02:00
Chris Berkhout
5fdf16edb7 Update pytest. 2024-08-03 12:33:29 +02:00
Chris Berkhout
5a0de59aba coinmarketcap: fix quote output. 2024-08-02 09:47:07 +02:00
Chris Berkhout
8921653154 Fix coinmarketcap: first pass. 2024-07-23 22:08:22 +02:00
Chris Berkhout
b8c4554298 Fix flake8 warning. 2024-07-11 16:25:30 +02:00
Chris Berkhout
47544a11b6 Minor formatting. 2024-07-11 15:52:35 +02:00
Chris Berkhout
a12f3d3899 Version 1.4.7. 2024-07-11 15:34:32 +02:00
Chris Berkhout
86e178ea96 Skip live tests for sources with known issues that need more work. 2024-07-11 15:32:10 +02:00
Chris Berkhout
1f01c54c4d Update alphavantage physical and digital currency live test cases. 2024-07-11 15:31:37 +02:00
Chris Berkhout
f4aee18360 Update parsing of Alphavantage digital currency response data. 2024-07-11 15:30:45 +02:00
Chris Berkhout
0b377a8d65 Fix description of data taht doesn't overlap the requested range. 2024-07-11 15:29:51 +02:00
Chris Berkhout
733c849286 Follow flake8 advice. 2024-07-11 14:59:02 +02:00
Chris Berkhout
96d3e44738 Update python to ^3.8.1 and flake8 to ^7.1.0. 2024-07-11 14:56:28 +02:00
Chris Berkhout
6519cf2845 Update datetime formatting. 2024-07-11 11:40:38 +02:00
Chris Berkhout
04936c5cd6 Update lxml dependency. 2024-07-11 11:40:12 +02:00
Chris Berkhout
46dfd876ea Version 1.4.6. 2023-08-26 11:00:09 +02:00
Chris Berkhout
06c2876152 Make AlphaVantage premium endpoint rejection message check more robust. 2023-08-26 10:57:40 +02:00
Chris Berkhout
ffeebe5ffa Don't skip any AlphaVantage tests anymore. All pass. 2023-08-26 10:57:06 +02:00
Chris Berkhout
2b0f01110a Revert "Update Alphavantage source for changes in which endpoint is premium."
This reverts commit d6036c9d14.
2023-08-26 10:50:41 +02:00
Chris Berkhout
786ddd3c8c Update which Alphavantage test is skipped. 2023-08-26 10:48:49 +02:00
Chris Berkhout
bd3489ea71 Handle coinmarketcap return null for some prices. 2023-08-26 10:38:31 +02:00
28 changed files with 1809 additions and 2084 deletions

poetry.lock (generated)

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pricehist"
-version = "1.4.5"
+version = "1.4.12"
 description = "Fetch and format historical price data"
 authors = ["Chris Berkhout <chris@chrisberkhout.com>"]
 license = "MIT"
@@ -14,16 +14,16 @@ include = [
 ]
 [tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.8.1"
 requests = "^2.25.1"
-lxml = "^4.6.2"
+lxml = "^5.1.0"
 cssselect = "^1.1.0"
 curlify = "^2.2.1"
 [tool.poetry.dev-dependencies]
-pytest = "^6.2.2"
+pytest = "^8.3.2"
 black = "^22.10.0"
-flake8 = "^3.9.1"
+flake8 = "^7.1.0"
 isort = "^5.8.0"
 responses = "^0.13.3"
 coverage = "^5.5"


@@ -1 +1 @@
-__version__ = "1.4.5"
+__version__ = "1.4.12"


@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.exchangeratehost import ExchangeRateHost
Source = beanprice.source(ExchangeRateHost())


@@ -80,5 +80,7 @@ def _cov_description(
             f"and ends {end_uncovered} day{s(end_uncovered)} earlier "
             f"than requested"
         )
-    else:
+    elif start_uncovered == 0 and end_uncovered == 0:
         return "as requested"
+    else:
+        return "which doesn't match the request"


@@ -40,7 +40,7 @@ Classes:
 import hashlib
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from decimal import Decimal
 from importlib.resources import files
@@ -125,7 +125,7 @@ class GnuCashSQL(BaseOutput):
             .read_text()
             .format(
                 version=__version__,
-                timestamp=datetime.utcnow().isoformat() + "Z",
+                timestamp=datetime.now(timezone.utc).isoformat()[:-6] + "Z",
                 base=self._sql_str(base),
                 quote=self._sql_str(quote),
                 values_comment=values_comment,
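Aside (not part of the diff): datetime.now(timezone.utc) is the timezone-aware replacement for the deprecated datetime.utcnow(), but its isoformat() output ends in a "+00:00" offset rather than "Z", which is why the new expression trims the last six characters before appending "Z". A minimal sketch:

from datetime import datetime, timezone

now = datetime.now(timezone.utc)
print(now.isoformat())             # e.g. 2024-09-15T12:17:10.123456+00:00 (offset suffix)
print(now.isoformat()[:-6] + "Z")  # e.g. 2024-09-15T12:17:10.123456Z, same shape as before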


@@ -51,14 +51,13 @@ class AlphaVantage(BaseSource):
             "will list all digital and physical currency symbols.\n"
             "The PAIR for stocks is the stock symbol only. The quote currency "
             f"will be determined automatically. {self._stock_symbols_message()}\n"
-            "The price type 'adjclose' is only available for stocks.\n"
+            "The price type 'adjclose' is only available for stocks, and "
+            "requires an access key for which premium endpoints are unlocked.\n"
             "Beware that digital currencies quoted in non-USD currencies may "
             "be converted from USD data at one recent exchange rate rather "
             "than using historical rates.\n"
-            "Alpha Vantage's standard API call frequency limits is 5 calls per "
-            "minute and 500 per day, so you may need to pause between successive "
-            "commands. Note that retrieving prices for one stock consumes two "
-            "API calls."
+            "Alpha Vantage's standard API rate limit is 25 requests per day. "
+            "Note that retrieving prices for one stock consumes two API calls."
         )

     def _stock_symbols_message(self):
@@ -174,9 +173,9 @@ class AlphaVantage(BaseSource):
         expected_keys = ["1. symbol", "2. name", "3. type", "4. region", "8. currency"]
         if (
-            type(data) != dict
+            type(data) is not dict
             or "bestMatches" not in data
-            or type(data["bestMatches"]) != list
+            or type(data["bestMatches"]) is not list
             or not all(k in m for k in expected_keys for m in data["bestMatches"])
         ):
             raise exceptions.ResponseParsingError("Unexpected content.")
@@ -186,9 +185,10 @@ class AlphaVantage(BaseSource):
     def _stock_data(self, series):
         output_quote = self._stock_currency(series.base) or "UNKNOWN"

-        # As of 2022-11-24 TIME_SERIES_DAILY_ADJUSTED is no longer premium, but
-        # now TIME_SERIES_DAILY is. So, always use TIME_SERIES_DAILY_ADJUSTED.
-        function = "TIME_SERIES_DAILY_ADJUSTED"
+        if series.type == "adjclose":
+            function = "TIME_SERIES_DAILY_ADJUSTED"
+        else:
+            function = "TIME_SERIES_DAILY"

         params = {
             "function": function,
@@ -265,7 +265,7 @@ class AlphaVantage(BaseSource):
         self._raise_for_generic_errors(data)

-        if type(data) != dict or "Time Series FX (Daily)" not in data:
+        if type(data) is not dict or "Time Series FX (Daily)" not in data:
             raise exceptions.ResponseParsingError("Unexpected content.")

         normalized_data = {
@@ -306,15 +306,15 @@ class AlphaVantage(BaseSource):
         self._raise_for_generic_errors(data)

-        if type(data) != dict or "Time Series (Digital Currency Daily)" not in data:
+        if type(data) is not dict or "Time Series (Digital Currency Daily)" not in data:
             raise exceptions.ResponseParsingError("Unexpected content.")

         normalized_data = {
             day: {
-                "open": entries[f"1a. open ({series.quote})"],
-                "high": entries[f"2a. high ({series.quote})"],
-                "low": entries[f"3a. low ({series.quote})"],
-                "close": entries[f"4a. close ({series.quote})"],
+                "open": entries["1. open"],
+                "high": entries["2. high"],
+                "low": entries["3. low"],
+                "close": entries["4. close"],
             }
             for day, entries in reversed(
                 data["Time Series (Digital Currency Daily)"].items()
@@ -334,12 +334,13 @@ class AlphaVantage(BaseSource):
         return key

     def _raise_for_generic_errors(self, data):
-        if type(data) == dict:
-            if "Note" in data and "call frequency" in data["Note"]:
-                raise exceptions.RateLimit(data["Note"])
+        if type(data) is dict:
+            if "Information" in data and "daily rate limits" in data["Information"]:
+                raise exceptions.RateLimit(data["Information"])
             if (
                 "Information" in data
-                and "unlock all premium endpoints" in data["Information"]
+                and "unlock" in data["Information"]
+                and "premium" in data["Information"]
             ):
                 msg = "You were denied access to a premium endpoint."
                 raise exceptions.CredentialsError([self.API_KEY_NAME], self, msg)
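Aside (not part of the diff): a worked sketch of how the updated _raise_for_generic_errors() distinguishes the two "Information" payloads that the test fixtures further down emulate (message text abbreviated here):

rate_limited = {"Information": "Our standard API rate limit is 25 requests per day. "
                               "Please subscribe ... to instantly remove all daily rate limits."}
premium_only = {"Information": "This is a premium endpoint and there are multiple ways "
                               "to unlock premium endpoints ..."}

assert "daily rate limits" in rate_limited["Information"]  # -> exceptions.RateLimit
assert "unlock" in premium_only["Information"]             # -> exceptions.CredentialsError
assert "premium" in premium_only["Information"]            # (both substrings must match)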


@@ -2,6 +2,7 @@ import dataclasses
 import json
 from datetime import datetime, timezone
 from decimal import Decimal
+from functools import lru_cache

 import requests
@@ -32,13 +33,16 @@ class CoinMarketCap(BaseSource):
     def notes(self):
         return (
-            "This source makes unoffical use of endpoints that power CoinMarketCap's "
-            "public web interface. The price data comes from a public equivalent of "
-            "the OHLCV Historical endpoint found in CoinMarketCap's official API.\n"
-            "CoinMarketCap currency symbols are not necessarily unique, so it "
-            "is recommended that you use IDs, which can be listed via the "
-            "--symbols option. For example, 'ETH/BTC' is 'id=1027/id=1'. The "
-            "corresponding symbols will be used in output."
+            "This source makes unoffical use of endpoints that power "
+            "CoinMarketCap's public web interface.\n"
+            "CoinMarketCap currency symbols are not necessarily unique. "
+            "Each symbol you give will be coverted an ID by checking fiat and "
+            "metals first, then crypto by CoinMarketCap rank. "
+            "The symbol data is hard-coded for fiat and metals, but fetched "
+            "live for crypto.\n"
+            "You can directly use IDs, which can be listed via the --symbols "
+            "option. For example, 'ETH/BTC' is 'id=1027/id=1'. "
+            "The corresponding symbols will be used in output, when available."
         )

     def symbols(self):
@@ -55,8 +59,9 @@ class CoinMarketCap(BaseSource):
         prices = []
         for item in data.get("quotes", []):
-            d = item["time_open"][0:10]
-            amount = self._amount(next(iter(item["quote"].values())), series.type)
-            prices.append(Price(d, amount))
+            d = item["timeOpen"][0:10]
+            amount = self._amount(item["quote"], series.type)
+            if amount is not None:
+                prices.append(Price(d, amount))

         output_base, output_quote = self._output_pair(series.base, series.quote, data)
@@ -66,21 +71,21 @@
         )

     def _data(self, series):
-        url = "https://web-api.coinmarketcap.com/v1/cryptocurrency/ohlcv/historical"
+        url = "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical"

         params = {}

         if series.base.startswith("ID="):
             params["id"] = series.base[3:]
         else:
-            params["symbol"] = series.base
+            params["id"] = self._id_from_symbol(series.base, series)

         if series.quote.startswith("ID="):
-            params["convert_id"] = series.quote[3:]
+            params["convertId"] = series.quote[3:]
         else:
-            params["convert"] = series.quote
+            params["convertId"] = self._id_from_symbol(series.quote, series)

-        params["time_start"] = int(
+        params["timeStart"] = int(
             int(
                 datetime.strptime(series.start, "%Y-%m-%d")
                 .replace(tzinfo=timezone.utc)
@@ -89,12 +94,14 @@ class CoinMarketCap(BaseSource):
             - 24 * 60 * 60
             # Start one period earlier since the start is exclusive.
         )
-        params["time_end"] = int(
+        params["timeEnd"] = int(
             datetime.strptime(series.end, "%Y-%m-%d")
             .replace(tzinfo=timezone.utc)
             .timestamp()
         ) # Don't round up since it's inclusive of the period covering the end time.

+        params["interval"] = "daily"
+
         try:
             response = self.log_curl(requests.get(url, params=params))
         except Exception as e:
@@ -113,26 +120,6 @@ class CoinMarketCap(BaseSource):
                 series.base, series.quote, self, "Bad quote ID."
             )
-        elif code == 400 and 'Invalid value for \\"convert\\"' in text:
-            raise exceptions.InvalidPair(
-                series.base, series.quote, self, "Bad quote symbol."
-            )
-        elif code == 400 and "must be older than" in text:
-            if series.start <= series.end:
-                raise exceptions.BadResponse("The start date must be in the past.")
-            else:
-                raise exceptions.BadResponse(
-                    "The start date must preceed or match the end date."
-                )
-        elif (
-            code == 400
-            and "must be a valid ISO 8601 timestamp or unix time" in text
-            and series.start < "2001-09-11"
-        ):
-            raise exceptions.BadResponse("The start date can't preceed 2001-09-11.")

         try:
             response.raise_for_status()
         except Exception as e:
@@ -143,7 +130,18 @@ class CoinMarketCap(BaseSource):
         except Exception as e:
             raise exceptions.ResponseParsingError(str(e)) from e

-        if type(parsed) != dict or "data" not in parsed:
+        if (
+            "status" in parsed
+            and "error_code" in parsed["status"]
+            and parsed["status"]["error_code"] == "500"
+            and "The system is busy" in parsed["status"]["error_message"]
+        ):
+            raise exceptions.BadResponse(
+                "The server indicated a general error. "
+                "There may be problem with your request."
+            )
+
+        if type(parsed) is not dict or "data" not in parsed:
             raise exceptions.ResponseParsingError("Unexpected content.")
         elif len(parsed["data"]) == 0:
@@ -155,39 +153,154 @@ class CoinMarketCap(BaseSource):
         return parsed["data"]

     def _amount(self, data, type):
-        if type in ["mid"]:
+        if type in ["mid"] and data["high"] is not None and data["low"] is not None:
             high = Decimal(str(data["high"]))
             low = Decimal(str(data["low"]))
             return sum([high, low]) / 2
-        else:
+        elif type in data and data[type] is not None:
             return Decimal(str(data[type]))
+        else:
+            return None

     def _output_pair(self, base, quote, data):
         data_base = data["symbol"]
+        symbols = {i["id"]: (i["symbol"] or i["code"]) for i in self._symbol_data()}

         data_quote = None
         if len(data["quotes"]) > 0:
-            data_quote = next(iter(data["quotes"][0]["quote"].keys()))
+            data_quote = symbols[int(data["quotes"][0]["quote"]["name"])]

         lookup_quote = None
         if quote.startswith("ID="):
-            symbols = {i["id"]: (i["symbol"] or i["code"]) for i in self._symbol_data()}
             lookup_quote = symbols[int(quote[3:])]

         output_base = data_base
-        output_quote = lookup_quote or data_quote or quote
+        output_quote = data_quote or lookup_quote or quote

         return (output_base, output_quote)

+    def _id_from_symbol(self, symbol, series):
+        for i in self._symbol_data():
+            if i["symbol"] == symbol:
+                return i["id"]
+        raise exceptions.InvalidPair(
+            series.base, series.quote, self, f"Invalid symbol '{symbol}'."
+        )
+
+    @lru_cache(maxsize=1)
     def _symbol_data(self):
-        base_url = "https://web-api.coinmarketcap.com/v1/"
-        fiat_url = f"{base_url}fiat/map?include_metals=true"
+        base_url = "https://api.coinmarketcap.com/data-api/v1/"
         crypto_url = f"{base_url}cryptocurrency/map?sort=cmc_rank"
-        fiat = self._get_json_data(fiat_url)
         crypto = self._get_json_data(crypto_url)
-        return crypto + fiat
+        # fmt: off
fiat = [
{"id": 2781, "symbol": "USD", "name": "United States Dollar"},
{"id": 3526, "symbol": "ALL", "name": "Albanian Lek"},
{"id": 3537, "symbol": "DZD", "name": "Algerian Dinar"},
{"id": 2821, "symbol": "ARS", "name": "Argentine Peso"},
{"id": 3527, "symbol": "AMD", "name": "Armenian Dram"},
{"id": 2782, "symbol": "AUD", "name": "Australian Dollar"},
{"id": 3528, "symbol": "AZN", "name": "Azerbaijani Manat"},
{"id": 3531, "symbol": "BHD", "name": "Bahraini Dinar"},
{"id": 3530, "symbol": "BDT", "name": "Bangladeshi Taka"},
{"id": 3533, "symbol": "BYN", "name": "Belarusian Ruble"},
{"id": 3532, "symbol": "BMD", "name": "Bermudan Dollar"},
{"id": 2832, "symbol": "BOB", "name": "Bolivian Boliviano"},
{"id": 3529, "symbol": "BAM", "name": "Bosnia-Herzegovina Convertible Mark"}, # noqa: E501
{"id": 2783, "symbol": "BRL", "name": "Brazilian Real"},
{"id": 2814, "symbol": "BGN", "name": "Bulgarian Lev"},
{"id": 3549, "symbol": "KHR", "name": "Cambodian Riel"},
{"id": 2784, "symbol": "CAD", "name": "Canadian Dollar"},
{"id": 2786, "symbol": "CLP", "name": "Chilean Peso"},
{"id": 2787, "symbol": "CNY", "name": "Chinese Yuan"},
{"id": 2820, "symbol": "COP", "name": "Colombian Peso"},
{"id": 3534, "symbol": "CRC", "name": "Costa Rican Colón"},
{"id": 2815, "symbol": "HRK", "name": "Croatian Kuna"},
{"id": 3535, "symbol": "CUP", "name": "Cuban Peso"},
{"id": 2788, "symbol": "CZK", "name": "Czech Koruna"},
{"id": 2789, "symbol": "DKK", "name": "Danish Krone"},
{"id": 3536, "symbol": "DOP", "name": "Dominican Peso"},
{"id": 3538, "symbol": "EGP", "name": "Egyptian Pound"},
{"id": 2790, "symbol": "EUR", "name": "Euro"},
{"id": 3539, "symbol": "GEL", "name": "Georgian Lari"},
{"id": 3540, "symbol": "GHS", "name": "Ghanaian Cedi"},
{"id": 3541, "symbol": "GTQ", "name": "Guatemalan Quetzal"},
{"id": 3542, "symbol": "HNL", "name": "Honduran Lempira"},
{"id": 2792, "symbol": "HKD", "name": "Hong Kong Dollar"},
{"id": 2793, "symbol": "HUF", "name": "Hungarian Forint"},
{"id": 2818, "symbol": "ISK", "name": "Icelandic Króna"},
{"id": 2796, "symbol": "INR", "name": "Indian Rupee"},
{"id": 2794, "symbol": "IDR", "name": "Indonesian Rupiah"},
{"id": 3544, "symbol": "IRR", "name": "Iranian Rial"},
{"id": 3543, "symbol": "IQD", "name": "Iraqi Dinar"},
{"id": 2795, "symbol": "ILS", "name": "Israeli New Shekel"},
{"id": 3545, "symbol": "JMD", "name": "Jamaican Dollar"},
{"id": 2797, "symbol": "JPY", "name": "Japanese Yen"},
{"id": 3546, "symbol": "JOD", "name": "Jordanian Dinar"},
{"id": 3551, "symbol": "KZT", "name": "Kazakhstani Tenge"},
{"id": 3547, "symbol": "KES", "name": "Kenyan Shilling"},
{"id": 3550, "symbol": "KWD", "name": "Kuwaiti Dinar"},
{"id": 3548, "symbol": "KGS", "name": "Kyrgystani Som"},
{"id": 3552, "symbol": "LBP", "name": "Lebanese Pound"},
{"id": 3556, "symbol": "MKD", "name": "Macedonian Denar"},
{"id": 2800, "symbol": "MYR", "name": "Malaysian Ringgit"},
{"id": 2816, "symbol": "MUR", "name": "Mauritian Rupee"},
{"id": 2799, "symbol": "MXN", "name": "Mexican Peso"},
{"id": 3555, "symbol": "MDL", "name": "Moldovan Leu"},
{"id": 3558, "symbol": "MNT", "name": "Mongolian Tugrik"},
{"id": 3554, "symbol": "MAD", "name": "Moroccan Dirham"},
{"id": 3557, "symbol": "MMK", "name": "Myanma Kyat"},
{"id": 3559, "symbol": "NAD", "name": "Namibian Dollar"},
{"id": 3561, "symbol": "NPR", "name": "Nepalese Rupee"},
{"id": 2811, "symbol": "TWD", "name": "New Taiwan Dollar"},
{"id": 2802, "symbol": "NZD", "name": "New Zealand Dollar"},
{"id": 3560, "symbol": "NIO", "name": "Nicaraguan Córdoba"},
{"id": 2819, "symbol": "NGN", "name": "Nigerian Naira"},
{"id": 2801, "symbol": "NOK", "name": "Norwegian Krone"},
{"id": 3562, "symbol": "OMR", "name": "Omani Rial"},
{"id": 2804, "symbol": "PKR", "name": "Pakistani Rupee"},
{"id": 3563, "symbol": "PAB", "name": "Panamanian Balboa"},
{"id": 2822, "symbol": "PEN", "name": "Peruvian Sol"},
{"id": 2803, "symbol": "PHP", "name": "Philippine Peso"},
{"id": 2805, "symbol": "PLN", "name": "Polish Złoty"},
{"id": 2791, "symbol": "GBP", "name": "Pound Sterling"},
{"id": 3564, "symbol": "QAR", "name": "Qatari Rial"},
{"id": 2817, "symbol": "RON", "name": "Romanian Leu"},
{"id": 2806, "symbol": "RUB", "name": "Russian Ruble"},
{"id": 3566, "symbol": "SAR", "name": "Saudi Riyal"},
{"id": 3565, "symbol": "RSD", "name": "Serbian Dinar"},
{"id": 2808, "symbol": "SGD", "name": "Singapore Dollar"},
{"id": 2812, "symbol": "ZAR", "name": "South African Rand"},
{"id": 2798, "symbol": "KRW", "name": "South Korean Won"},
{"id": 3567, "symbol": "SSP", "name": "South Sudanese Pound"},
{"id": 3573, "symbol": "VES", "name": "Sovereign Bolivar"},
{"id": 3553, "symbol": "LKR", "name": "Sri Lankan Rupee"},
{"id": 2807, "symbol": "SEK", "name": "Swedish Krona"},
{"id": 2785, "symbol": "CHF", "name": "Swiss Franc"},
{"id": 2809, "symbol": "THB", "name": "Thai Baht"},
{"id": 3569, "symbol": "TTD", "name": "Trinidad and Tobago Dollar"},
{"id": 3568, "symbol": "TND", "name": "Tunisian Dinar"},
{"id": 2810, "symbol": "TRY", "name": "Turkish Lira"},
{"id": 3570, "symbol": "UGX", "name": "Ugandan Shilling"},
{"id": 2824, "symbol": "UAH", "name": "Ukrainian Hryvnia"},
{"id": 2813, "symbol": "AED", "name": "United Arab Emirates Dirham"},
{"id": 3571, "symbol": "UYU", "name": "Uruguayan Peso"},
{"id": 3572, "symbol": "UZS", "name": "Uzbekistan Som"},
{"id": 2823, "symbol": "VND", "name": "Vietnamese Dong"},
]
metals = [
{"id": 3575, "symbol": "XAU", "name": "Gold Troy Ounce"},
{"id": 3574, "symbol": "XAG", "name": "Silver Troy Ounce"},
{"id": 3577, "symbol": "XPT", "name": "Platinum Ounce"},
{"id": 3576, "symbol": "XPD", "name": "Palladium Ounce"},
]
# fmt: on
return fiat + metals + crypto
     def _get_json_data(self, url, params={}):
         try:
@@ -205,7 +318,7 @@ class CoinMarketCap(BaseSource):
         except Exception as e:
             raise exceptions.ResponseParsingError(str(e)) from e

-        if type(parsed) != dict or "data" not in parsed:
+        if type(parsed) is not dict or "data" not in parsed:
             raise exceptions.ResponseParsingError("Unexpected content.")
         elif len(parsed["data"]) == 0:
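Aside (not part of the diff): a sketch of the request the updated _data() above builds for BTC priced in AUD over 2021-01-01 to 2021-01-07. The IDs and timestamps follow the updated tests further down; timeStart is one day earlier because the start is exclusive.

import requests

params = {
    "id": "1",                # BTC, resolved via _id_from_symbol("BTC", ...)
    "convertId": "2782",      # AUD
    "timeStart": 1609372800,  # 2020-12-31 00:00 UTC, one period before the requested start
    "timeEnd": 1609977600,    # 2021-01-07 00:00 UTC
    "interval": "daily",
}
response = requests.get(
    "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical",
    params=params,
)
quotes = response.json()["data"]["quotes"]  # each item carries "timeOpen" and a "quote" dict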


@ -0,0 +1,122 @@
import dataclasses
import json
from decimal import Decimal
import requests
from pricehist import exceptions
from pricehist.price import Price
from .basesource import BaseSource
class ExchangeRateHost(BaseSource):
def id(self):
return "exchangeratehost"
def name(self):
return "exchangerate.host Exchange rates API"
def description(self):
return (
"Exchange rates API is a simple and lightweight free service for "
"current and historical foreign exchange rates & crypto exchange "
"rates."
)
def source_url(self):
return "https://exchangerate.host/"
def start(self):
return "1999-01-01"
def types(self):
return ["close"]
def notes(self):
return ""
def symbols(self):
url = "https://api.coindesk.com/v1/bpi/supported-currencies.json"
try:
response = self.log_curl(requests.get(url))
except Exception as e:
raise exceptions.RequestError(str(e)) from e
try:
response.raise_for_status()
except Exception as e:
raise exceptions.BadResponse(str(e)) from e
try:
data = json.loads(response.content)
relevant = [i for i in data if i["currency"] not in ["BTC", "XBT"]]
results = [
(f"BTC/{i['currency']}", f"Bitcoin against {i['country']}")
for i in sorted(relevant, key=lambda i: i["currency"])
]
except Exception as e:
raise exceptions.ResponseParsingError(str(e)) from e
if not results:
raise exceptions.ResponseParsingError("Expected data not found")
else:
return results
def fetch(self, series):
if series.base != "BTC" or series.quote in ["BTC", "XBT"]:
# BTC is the only valid base.
# BTC as the quote will return BTC/USD, which we don't want.
# XBT as the quote will fail with HTTP status 500.
raise exceptions.InvalidPair(series.base, series.quote, self)
data = self._data(series)
prices = []
for (d, v) in data.get("bpi", {}).items():
prices.append(Price(d, Decimal(str(v))))
return dataclasses.replace(series, prices=prices)
def _data(self, series):
url = "https://api.coindesk.com/v1/bpi/historical/close.json"
params = {
"currency": series.quote,
"start": series.start,
"end": series.end,
}
try:
response = self.log_curl(requests.get(url, params=params))
except Exception as e:
raise exceptions.RequestError(str(e)) from e
code = response.status_code
text = response.text
if code == 404 and "currency was not found" in text:
raise exceptions.InvalidPair(series.base, series.quote, self)
elif code == 404 and "only covers data from" in text:
raise exceptions.BadResponse(text)
elif code == 404 and "end date is before" in text and series.end < series.start:
raise exceptions.BadResponse("End date is before start date.")
elif code == 404 and "end date is before" in text:
raise exceptions.BadResponse("The start date must be in the past.")
elif code == 500 and "No results returned from database" in text:
raise exceptions.BadResponse(
"No results returned from database. This can happen when data "
"for a valid quote currency (e.g. CUP) doesn't go all the way "
"back to the start date, and potentially for other reasons."
)
else:
try:
response.raise_for_status()
except Exception as e:
raise exceptions.BadResponse(str(e)) from e
try:
result = json.loads(response.content)
except Exception as e:
raise exceptions.ResponseParsingError(str(e)) from e
return result


@@ -1,4 +1,3 @@
-import csv
 import dataclasses
 import json
 import logging
@@ -71,63 +70,39 @@ class Yahoo(BaseSource):
                 series.base, series.quote, self, "Don't specify the quote currency."
             )

-        quote, history = self._data(series)
+        data = self._data(series)
+        quote = data["chart"]["result"][0]["meta"]["currency"]
+        offset = data["chart"]["result"][0]["meta"]["gmtoffset"]
+        timestamps = data["chart"]["result"][0]["timestamp"]
+        adjclose_data = data["chart"]["result"][0]["indicators"]["adjclose"][0]
+        rest_data = data["chart"]["result"][0]["indicators"]["quote"][0]
+        amounts = {**adjclose_data, **rest_data}

         prices = [
-            Price(row["date"], amount)
-            for row in history
-            if (amount := self._amount(row, series.type))
+            Price(date, amount)
+            for i in range(len(timestamps))
+            if (date := self._ts_to_date(timestamps[i] + offset)) <= series.end
+            if (amount := self._amount(amounts, series.type, i)) is not None
         ]

         return dataclasses.replace(series, quote=quote, prices=prices)

-    def _amount(self, row, type):
-        if type == "mid" and row["high"] != "null" and row["low"] != "null":
-            return sum([Decimal(row["high"]), Decimal(row["low"])]) / 2
-        elif row[type] != "null":
-            return Decimal(row[type])
+    def _ts_to_date(self, ts) -> str:
+        return datetime.fromtimestamp(ts, tz=timezone.utc).date().isoformat()
+
+    def _amount(self, amounts, type, i):
+        if type == "mid" and amounts["high"] != "null" and amounts["low"] != "null":
+            return sum([Decimal(amounts["high"][i]), Decimal(amounts["low"][i])]) / 2
+        elif amounts[type] != "null" and amounts[type][i] is not None:
+            return Decimal(amounts[type][i])
         else:
             return None

-    def _data(self, series) -> (dict, csv.DictReader):
-        base_url = "https://query1.finance.yahoo.com/v7/finance"
+    def _data(self, series) -> dict:
+        base_url = "https://query1.finance.yahoo.com/v8/finance/chart"
         headers = {"User-Agent": f"pricehist/{__version__}"}
+        url = f"{base_url}/{series.base}"

-        spark_url = f"{base_url}/spark"
-        spark_params = {
-            "symbols": series.base,
-            "range": "1d",
-            "interval": "1d",
-            "indicators": "close",
-            "includeTimestamps": "false",
-            "includePrePost": "false",
-        }
-        try:
-            spark_response = self.log_curl(
-                requests.get(spark_url, params=spark_params, headers=headers)
-            )
-        except Exception as e:
-            raise exceptions.RequestError(str(e)) from e
-
-        code = spark_response.status_code
-        text = spark_response.text
-
-        if code == 404 and "No data found for spark symbols" in text:
-            raise exceptions.InvalidPair(
-                series.base, series.quote, self, "Symbol not found."
-            )
-
-        try:
-            spark_response.raise_for_status()
-        except Exception as e:
-            raise exceptions.BadResponse(str(e)) from e
-
-        try:
-            spark = json.loads(spark_response.content)
-            quote = spark["spark"]["result"][0]["response"][0]["meta"]["currency"]
-        except Exception as e:
-            raise exceptions.ResponseParsingError(
-                "The spark data couldn't be parsed. "
-            ) from e
-
         start_ts = int(
             datetime.strptime(series.start, "%Y-%m-%d")
@@ -142,34 +117,35 @@ class Yahoo(BaseSource):
             24 * 60 * 60
         ) # some symbols require padding on the end timestamp

-        history_url = f"{base_url}/download/{series.base}"
-        history_params = {
+        params = {
+            "symbol": series.base,
             "period1": start_ts,
             "period2": end_ts,
             "interval": "1d",
-            "events": "history",
+            "events": "capitalGain%7Cdiv%7Csplit",
             "includeAdjustedClose": "true",
+            "formatted": "true",
+            "userYfid": "true",
+            "lang": "en-US",
+            "region": "US",
         }

         try:
-            history_response = self.log_curl(
-                requests.get(history_url, params=history_params, headers=headers)
-            )
+            response = self.log_curl(requests.get(url, params=params, headers=headers))
         except Exception as e:
             raise exceptions.RequestError(str(e)) from e

-        code = history_response.status_code
-        text = history_response.text
+        code = response.status_code
+        text = response.text

         if code == 404 and "No data found, symbol may be delisted" in text:
             raise exceptions.InvalidPair(
                 series.base, series.quote, self, "Symbol not found."
             )
-        if code == 400 and "Data doesn't exist" in text:
+        elif code == 400 and "Data doesn't exist" in text:
             raise exceptions.BadResponse(
                 "No data for the given interval. Try requesting a larger interval."
             )
         elif code == 404 and "Timestamp data missing" in text:
             raise exceptions.BadResponse(
                 "Data missing. The given interval may be for a gap in the data "
@@ -177,20 +153,21 @@
             )

         try:
-            history_response.raise_for_status()
+            response.raise_for_status()
         except Exception as e:
             raise exceptions.BadResponse(str(e)) from e

         try:
-            history_lines = history_response.content.decode("utf-8").splitlines()
-            history_lines[0] = history_lines[0].lower().replace(" ", "")
-            history = csv.DictReader(history_lines, delimiter=",")
+            data = json.loads(response.content)
         except Exception as e:
-            raise exceptions.ResponseParsingError(str(e)) from e
+            raise exceptions.ResponseParsingError(
+                "The data couldn't be parsed. "
+            ) from e

-        if history_lines[0] != "date,open,high,low,close,adjclose,volume":
-            raise exceptions.ResponseParsingError("Unexpected CSV format")
+        if "timestamp" not in data["chart"]["result"][0]:
+            raise exceptions.BadResponse(
+                "No data for the given interval. "
+                "There may be a problem with the symbol or the interval."
+            )

-        requested_history = [row for row in history if row["date"] <= series.end]
-
-        return (quote, requested_history)
+        return data
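Aside (not part of the diff): the rough shape of the v8 chart response that the reworked fetch() reads. Only the key paths are taken from the code above; the values are placeholders.

data = {
    "chart": {
        "result": [
            {
                "meta": {"currency": "USD", "gmtoffset": 0},
                "timestamp": [...],  # one POSIX timestamp per trading day
                "indicators": {
                    "quote": [{"open": [...], "high": [...], "low": [...], "close": [...]}],
                    "adjclose": [{"adjclose": [...]}],
                },
            }
        ]
    }
}
quote = data["chart"]["result"][0]["meta"]["currency"]
amounts = {**data["chart"]["result"][0]["indicators"]["adjclose"][0],
           **data["chart"]["result"][0]["indicators"]["quote"][0]}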


@@ -63,42 +63,36 @@ name="Alpha Vantage stocks"
 cmd="pricehist fetch alphavantage TSLA -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,TSLA,USD,729.77,alphavantage,close
-2021-01-05,TSLA,USD,735.11,alphavantage,close
-2021-01-06,TSLA,USD,755.98,alphavantage,close
-2021-01-07,TSLA,USD,816.04,alphavantage,close
-2021-01-08,TSLA,USD,880.02,alphavantage,close
+2021-01-04,TSLA,USD,729.7700,alphavantage,close
+2021-01-05,TSLA,USD,735.1100,alphavantage,close
+2021-01-06,TSLA,USD,755.9800,alphavantage,close
+2021-01-07,TSLA,USD,816.0400,alphavantage,close
+2021-01-08,TSLA,USD,880.0200,alphavantage,close
 END
 run_test "$name" "$cmd" "$expected"

 name="Alpha Vantage physical currency"
-cmd="pricehist fetch alphavantage AUD/EUR -s 2021-01-04 -e 2021-01-08"
+cmd="pricehist fetch alphavantage AUD/EUR -s 2021-01-11 -e 2021-01-14"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,AUD,EUR,0.62558,alphavantage,close
-2021-01-05,AUD,EUR,0.63086,alphavantage,close
-2021-01-06,AUD,EUR,0.63306,alphavantage,close
-2021-01-07,AUD,EUR,0.63284,alphavantage,close
-2021-01-08,AUD,EUR,0.63530,alphavantage,close
+2021-01-11,AUD,EUR,0.63374,alphavantage,close
+2021-01-12,AUD,EUR,0.63684,alphavantage,close
+2021-01-13,AUD,EUR,0.63686,alphavantage,close
+2021-01-14,AUD,EUR,0.63984,alphavantage,close
 END
 run_test "$name" "$cmd" "$expected"

 name="Alpha Vantage digital currency"
-cmd="pricehist fetch alphavantage BTC/USD -s 2021-01-04 -e 2021-01-08"
+cmd="pricehist fetch alphavantage BTC/USD -s 2024-07-01 -e 2024-07-05"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,BTC,USD,31988.71000000,alphavantage,close
-2021-01-05,BTC,USD,33949.53000000,alphavantage,close
-2021-01-06,BTC,USD,36769.36000000,alphavantage,close
-2021-01-07,BTC,USD,39432.28000000,alphavantage,close
-2021-01-08,BTC,USD,40582.81000000,alphavantage,close
+2024-07-01,BTC,USD,62830.13000000,alphavantage,close
+2024-07-02,BTC,USD,62040.22000000,alphavantage,close
+2024-07-03,BTC,USD,60145.01000000,alphavantage,close
+2024-07-04,BTC,USD,57042.14000000,alphavantage,close
+2024-07-05,BTC,USD,56639.43000000,alphavantage,close
 END
-if [[ "$(date --iso-8601)" < "2023-06-15" ]]; then
-  skip_test "$name" "$cmd" "$expected"
-else
-  run_test "$name" "$cmd" "$expected"
-fi
+run_test "$name" "$cmd" "$expected"

 name="Bank of Canada"
 cmd="pricehist fetch bankofcanada CAD/USD -s 2021-01-04 -e 2021-01-08"
@@ -122,30 +116,29 @@ date,base,quote,amount,source,type
 2021-01-07,BTC,EUR,31208.49,coinbasepro,mid
 2021-01-08,BTC,EUR,32019,coinbasepro,mid
 END
-run_test "$name" "$cmd" "$expected"
+skip_test "$name" "$cmd" "$expected"

-name="CoinDesk Bitcoin Price Index"
-cmd="pricehist fetch coindesk BTC/EUR -s 2021-01-04 -e 2021-01-08"
+name="CoinDesk Bitcoin Price Index v1"
+cmd="pricehist fetch coindeskbpi BTC/USD -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
-WARNING This source is deprecated. Data stops at 2022-07-10.
 date,base,quote,amount,source,type
-2021-01-04,BTC,EUR,26135.4901,coindesk,close
-2021-01-05,BTC,EUR,27677.9141,coindesk,close
-2021-01-06,BTC,EUR,29871.4301,coindesk,close
-2021-01-07,BTC,EUR,32183.1594,coindesk,close
-2021-01-08,BTC,EUR,33238.5724,coindesk,close
+2021-01-04,BTC,USD,31431.6123,coindeskbpi,close
+2021-01-05,BTC,USD,34433.6065,coindeskbpi,close
+2021-01-06,BTC,USD,36275.7563,coindeskbpi,close
+2021-01-07,BTC,USD,39713.5079,coindeskbpi,close
+2021-01-08,BTC,USD,40519.4486,coindeskbpi,close
 END
-run_test "$name" "$cmd" "$expected"
+skip_test "$name" "$cmd" "$expected"

 name="CoinMarketCap"
 cmd="pricehist fetch coinmarketcap BTC/EUR -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,BTC,EUR,25329.110170161484,coinmarketcap,mid
-2021-01-05,BTC,EUR,26321.26752264663,coinmarketcap,mid
-2021-01-06,BTC,EUR,28572.211551075297,coinmarketcap,mid
-2021-01-07,BTC,EUR,31200.894541155460,coinmarketcap,mid
-2021-01-08,BTC,EUR,32155.0183793871585,coinmarketcap,mid
+2021-01-04,BTC,EUR,25322.5034586073,coinmarketcap,mid
+2021-01-05,BTC,EUR,26318.9928757682,coinmarketcap,mid
+2021-01-06,BTC,EUR,28570.9945210226,coinmarketcap,mid
+2021-01-07,BTC,EUR,31200.8342706036,coinmarketcap,mid
+2021-01-08,BTC,EUR,32157.05279624555,coinmarketcap,mid
 END
 run_test "$name" "$cmd" "$expected"
@@ -165,11 +158,11 @@ name="Yahoo! Finance"
 cmd="pricehist fetch yahoo TSLA -s 2021-01-04 -e 2021-01-08"
 read -r -d '' expected <<END
 date,base,quote,amount,source,type
-2021-01-04,TSLA,USD,243.256668,yahoo,adjclose
-2021-01-05,TSLA,USD,245.036667,yahoo,adjclose
-2021-01-06,TSLA,USD,251.993332,yahoo,adjclose
-2021-01-07,TSLA,USD,272.013336,yahoo,adjclose
-2021-01-08,TSLA,USD,293.339996,yahoo,adjclose
+2021-01-04,TSLA,USD,243.2566680908203125,yahoo,adjclose
+2021-01-05,TSLA,USD,245.0366668701171875,yahoo,adjclose
+2021-01-06,TSLA,USD,251.9933319091796875,yahoo,adjclose
+2021-01-07,TSLA,USD,272.013336181640625,yahoo,adjclose
+2021-01-08,TSLA,USD,293.339996337890625,yahoo,adjclose
 END
 run_test "$name" "$cmd" "$expected"


@@ -48,6 +48,9 @@ search_url = re.compile(
     r"https://www\.alphavantage\.co/query\?function=SYMBOL_SEARCH.*"
 )
 stock_url = re.compile(
+    r"https://www\.alphavantage\.co/query\?function=TIME_SERIES_DAILY&.*"
+)
+adj_stock_url = re.compile(
     r"https://www\.alphavantage\.co/query\?function=TIME_SERIES_DAILY_ADJUSTED.*"
 )
 physical_url = re.compile(r"https://www\.alphavantage\.co/query\?function=FX_DAILY.*")
@@ -56,17 +59,22 @@
 )

 rate_limit_json = (
-    '{ "Note": "'
-    "Thank you for using Alpha Vantage! Our standard API call frequency is 5 "
-    "calls per minute and 500 calls per day. Please visit "
-    "https://www.alphavantage.co/premium/ if you would like to target a higher "
-    "API call frequency."
+    '{ "Information": "'
+    "Thank you for using Alpha Vantage! Our standard API rate limit is 25 "
+    "requests per day. Please subscribe to any of the premium plans at "
+    "https://www.alphavantage.co/premium/ to instantly remove all daily rate "
+    "limits."
     '" }'
 )

 premium_json = (
     '{ "Information": "Thank you for using Alpha Vantage! This is a premium '
-    "endpoint. You may subscribe to any of the premium plans at "
+    "endpoint and there are multiple ways to unlock premium endpoints: (1) "
+    "become a holder of Alpha Vantage Coin (AVC), an Ethereum-based "
+    "cryptocurrency that provides various utility & governance functions "
+    "within the Alpha Vantage ecosystem (AVC mining guide: "
+    "https://www.alphatournament.com/avc_mining_guide/) to unlock all "
+    "premium endpoints, (2) subscribe to any of the premium plans at "
     "https://www.alphavantage.co/premium/ to instantly unlock all premium "
     'endpoints" }'
 )
@@ -109,7 +117,7 @@ def ibm_ok(requests_mock):
 @pytest.fixture
 def ibm_adj_ok(requests_mock):
     json = (Path(os.path.splitext(__file__)[0]) / "ibm-partial-adj.json").read_text()
-    requests_mock.add(responses.GET, stock_url, body=json, status=200)
+    requests_mock.add(responses.GET, adj_stock_url, body=json, status=200)
     yield requests_mock
@@ -296,7 +304,7 @@ def test_fetch_stock_known(src, type, search_ok, ibm_ok):
     stock_req = ibm_ok.calls[1].request
     assert search_req.params["function"] == "SYMBOL_SEARCH"
     assert search_req.params["keywords"] == "IBM"
-    assert stock_req.params["function"] == "TIME_SERIES_DAILY_ADJUSTED"
+    assert stock_req.params["function"] == "TIME_SERIES_DAILY"
     assert stock_req.params["symbol"] == "IBM"
     assert stock_req.params["outputsize"] == "full"
     assert (series.base, series.quote) == ("IBM", "USD")
@@ -419,7 +427,7 @@ def test_fetch_stock_rate_limit(src, type, search_ok, requests_mock):
 def test_fetch_stock_premium(src, search_ok, requests_mock):
-    requests_mock.add(responses.GET, stock_url, body=premium_json)
+    requests_mock.add(responses.GET, adj_stock_url, body=premium_json)
     with pytest.raises(exceptions.CredentialsError) as e:
         src.fetch(Series("IBM", "", "adjclose", "2021-01-04", "2021-01-08"))
     assert "denied access to a premium endpoint" in str(e.value)


@ -10,88 +10,53 @@
}, },
"Time Series (Digital Currency Daily)": { "Time Series (Digital Currency Daily)": {
"2021-01-09": { "2021-01-09": {
"1a. open (AUD)": "55074.06950240", "1. open": "55074.06950240",
"1b. open (USD)": "40586.96000000", "2. high": "56150.17720000",
"2a. high (AUD)": "56150.17720000", "3. low": "52540.71680000",
"2b. high (USD)": "41380.00000000", "4. close": "54397.30924680",
"3a. low (AUD)": "52540.71680000", "5. volume": "75785.97967500"
"3b. low (USD)": "38720.00000000",
"4a. close (AUD)": "54397.30924680",
"4b. close (USD)": "40088.22000000",
"5. volume": "75785.97967500",
"6. market cap (USD)": "75785.97967500"
}, },
"2021-01-08": { "2021-01-08": {
"1a. open (AUD)": "53507.50941120", "1. open": "53507.50941120",
"1b. open (USD)": "39432.48000000", "2. high": "56923.63300000",
"2a. high (AUD)": "56923.63300000", "3. low": "49528.31000000",
"2b. high (USD)": "41950.00000000", "4. close": "55068.43820140",
"3a. low (AUD)": "49528.31000000", "5. volume": "139789.95749900"
"3b. low (USD)": "36500.00000000",
"4a. close (AUD)": "55068.43820140",
"4b. close (USD)": "40582.81000000",
"5. volume": "139789.95749900",
"6. market cap (USD)": "139789.95749900"
}, },
"2021-01-07": { "2021-01-07": {
"1a. open (AUD)": "49893.81535840", "1. open": "49893.81535840",
"1b. open (USD)": "36769.36000000", "2. high": "54772.88310000",
"2a. high (AUD)": "54772.88310000", "3. low": "49256.92200000",
"2b. high (USD)": "40365.00000000", "4. close": "53507.23802320",
"3a. low (AUD)": "49256.92200000", "5. volume": "132825.70043700"
"3b. low (USD)": "36300.00000000",
"4a. close (AUD)": "53507.23802320",
"4b. close (USD)": "39432.28000000",
"5. volume": "132825.70043700",
"6. market cap (USD)": "132825.70043700"
}, },
"2021-01-06": { "2021-01-06": {
"1a. open (AUD)": "46067.47523820", "1. open": "46067.47523820",
"1b. open (USD)": "33949.53000000", "2. high": "50124.29161740",
"2a. high (AUD)": "50124.29161740", "3. low": "45169.81872000",
"2b. high (USD)": "36939.21000000", "4. close": "49893.81535840",
"3a. low (AUD)": "45169.81872000", "5. volume": "127139.20131000"
"3b. low (USD)": "33288.00000000",
"4a. close (AUD)": "49893.81535840",
"4b. close (USD)": "36769.36000000",
"5. volume": "127139.20131000",
"6. market cap (USD)": "127139.20131000"
}, },
"2021-01-05": { "2021-01-05": {
"1a. open (AUD)": "43408.17136500", "1. open": "43408.17136500",
"1b. open (USD)": "31989.75000000", "2. high": "46624.45840000",
"2a. high (AUD)": "46624.45840000", "3. low": "40572.50600000",
"2b. high (USD)": "34360.00000000", "4. close": "46067.47523820",
"3a. low (AUD)": "40572.50600000", "5. volume": "116049.99703800"
"3b. low (USD)": "29900.00000000",
"4a. close (AUD)": "46067.47523820",
"4b. close (USD)": "33949.53000000",
"5. volume": "116049.99703800",
"6. market cap (USD)": "116049.99703800"
}, },
"2021-01-04": { "2021-01-04": {
"1a. open (AUD)": "44779.08784700", "1. open": "44779.08784700",
"1b. open (USD)": "33000.05000000", "2. high": "45593.18400000",
"2a. high (AUD)": "45593.18400000", "3. low": "38170.72220000",
"2b. high (USD)": "33600.00000000", "4. close": "43406.76014740",
"3a. low (AUD)": "38170.72220000", "5. volume": "140899.88569000"
"3b. low (USD)": "28130.00000000",
"4a. close (AUD)": "43406.76014740",
"4b. close (USD)": "31988.71000000",
"5. volume": "140899.88569000",
"6. market cap (USD)": "140899.88569000"
}, },
"2021-01-03": { "2021-01-03": {
"1a. open (AUD)": "43661.51206300", "1. open": "43661.51206300",
"1b. open (USD)": "32176.45000000", "2. high": "47191.80858340",
"2a. high (AUD)": "47191.80858340", "3. low": "43371.85965060",
"2b. high (USD)": "34778.11000000", "4. close": "44779.08784700",
"3a. low (AUD)": "43371.85965060", "5. volume": "120957.56675000"
"3b. low (USD)": "31962.99000000",
"4a. close (AUD)": "44779.08784700",
"4b. close (USD)": "33000.05000000",
"5. volume": "120957.56675000",
"6. market cap (USD)": "120957.56675000"
} }
} }
} }


@ -36,9 +36,10 @@ def requests_mock():
yield mock yield mock
crypto_url = "https://web-api.coinmarketcap.com/v1/cryptocurrency/map?sort=cmc_rank" crypto_url = (
fiat_url = "https://web-api.coinmarketcap.com/v1/fiat/map?include_metals=true" "https://api.coinmarketcap.com/data-api/v1/cryptocurrency/map?sort=cmc_rank"
fetch_url = "https://web-api.coinmarketcap.com/v1/cryptocurrency/ohlcv/historical" )
fetch_url = "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical"
@pytest.fixture @pytest.fixture
@ -48,13 +49,6 @@ def crypto_ok(requests_mock):
yield requests_mock yield requests_mock
@pytest.fixture
def fiat_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "fiat-partial.json").read_text()
requests_mock.add(responses.GET, fiat_url, body=json, status=200)
yield requests_mock
@pytest.fixture @pytest.fixture
def recent_id_id_ok(requests_mock): def recent_id_id_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "recent-id1-id2782.json").read_text() json = (Path(os.path.splitext(__file__)[0]) / "recent-id1-id2782.json").read_text()
@ -62,36 +56,6 @@ def recent_id_id_ok(requests_mock):
yield requests_mock yield requests_mock
@pytest.fixture
def recent_id_sym_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "recent-id1-aud.json").read_text()
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
yield requests_mock
@pytest.fixture
def recent_sym_id_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "recent-btc-id2782.json").read_text()
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
yield requests_mock
@pytest.fixture
def recent_sym_sym_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "recent-btc-aud.json").read_text()
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
yield requests_mock
@pytest.fixture
def long_sym_sym_ok(requests_mock):
json = (
Path(os.path.splitext(__file__)[0]) / "long-btc-aud-partial.json"
).read_text()
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
yield requests_mock
def test_normalizesymbol(src): def test_normalizesymbol(src):
assert src.normalizesymbol("btc") == "BTC" assert src.normalizesymbol("btc") == "BTC"
assert src.normalizesymbol("id=1") == "ID=1" assert src.normalizesymbol("id=1") == "ID=1"
@ -120,63 +84,31 @@ def test_metadata(src):
assert isinstance(src.notes(), str) assert isinstance(src.notes(), str)
def test_symbols(src, crypto_ok, fiat_ok): def test_symbols(src, crypto_ok):
syms = src.symbols() syms = src.symbols()
assert ("id=1", "BTC Bitcoin") in syms assert ("id=1", "BTC Bitcoin") in syms
assert ("id=2782", "AUD Australian Dollar") in syms assert ("id=2782", "AUD Australian Dollar") in syms
assert len(syms) > 2 assert len(syms) > 2
def test_symbols_requests_logged(src, crypto_ok, fiat_ok, caplog): def test_symbols_request_logged(src, crypto_ok, caplog):
with caplog.at_level(logging.DEBUG): with caplog.at_level(logging.DEBUG):
src.symbols() src.symbols()
logged_requests = 0 logged_requests = 0
for r in caplog.records: for r in caplog.records:
if r.levelname == "DEBUG" and "curl " in r.message: if r.levelname == "DEBUG" and "curl " in r.message:
logged_requests += 1 logged_requests += 1
assert logged_requests == 2 assert logged_requests == 1
def test_symbols_fiat_not_found(src, requests_mock): def test_symbols_crypto_not_found(src, requests_mock):
requests_mock.add(responses.GET, fiat_url, body="{}", status=200)
with pytest.raises(exceptions.ResponseParsingError) as e:
src.symbols()
assert "Unexpected content" in str(e.value)
def test_symbols_fiat_network_issue(src, requests_mock):
requests_mock.add(
responses.GET,
fiat_url,
body=requests.exceptions.ConnectionError("Network issue"),
)
with pytest.raises(exceptions.RequestError) as e:
src.symbols()
assert "Network issue" in str(e.value)
def test_symbols_fiat_bad_status(src, requests_mock):
requests_mock.add(responses.GET, fiat_url, status=500)
with pytest.raises(exceptions.BadResponse) as e:
src.symbols()
assert "Server Error" in str(e.value)
def test_symbols_fiat_parsing_error(src, requests_mock):
requests_mock.add(responses.GET, fiat_url, body="NOT JSON")
with pytest.raises(exceptions.ResponseParsingError) as e:
src.symbols()
assert "while parsing data" in str(e.value)
def test_symbols_crypto_not_found(src, requests_mock, fiat_ok):
requests_mock.add(responses.GET, crypto_url, body="{}", status=200) requests_mock.add(responses.GET, crypto_url, body="{}", status=200)
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.symbols() src.symbols()
assert "Unexpected content" in str(e.value) assert "Unexpected content" in str(e.value)
def test_symbols_crypto_network_issue(src, requests_mock, fiat_ok): def test_symbols_crypto_network_issue(src, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
crypto_url, crypto_url,
@ -187,14 +119,14 @@ def test_symbols_crypto_network_issue(src, requests_mock, fiat_ok):
assert "Network issue" in str(e.value) assert "Network issue" in str(e.value)
def test_symbols_crypto_bad_status(src, requests_mock, fiat_ok): def test_symbols_crypto_bad_status(src, requests_mock):
requests_mock.add(responses.GET, crypto_url, status=500) requests_mock.add(responses.GET, crypto_url, status=500)
with pytest.raises(exceptions.BadResponse) as e: with pytest.raises(exceptions.BadResponse) as e:
src.symbols() src.symbols()
assert "Server Error" in str(e.value) assert "Server Error" in str(e.value)
def test_symbols_crypto_parsing_error(src, requests_mock, fiat_ok): def test_symbols_crypto_parsing_error(src, requests_mock):
requests_mock.add(responses.GET, crypto_url, body="NOT JSON") requests_mock.add(responses.GET, crypto_url, body="NOT JSON")
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.symbols() src.symbols()
@ -202,59 +134,59 @@ def test_symbols_crypto_parsing_error(src, requests_mock, fiat_ok):
def test_symbols_no_data(src, type, requests_mock): def test_symbols_no_data(src, type, requests_mock):
requests_mock.add(responses.GET, fiat_url, body='{"data": []}') requests_mock.add(responses.GET, crypto_url, body='{"data": []}')
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.symbols() src.symbols()
assert "Empty data section" in str(e.value) assert "Empty data section" in str(e.value)
def test_fetch_known_pair_id_id(src, type, recent_id_id_ok, crypto_ok, fiat_ok): def test_fetch_known_pair_id_id(src, type, recent_id_id_ok, crypto_ok):
series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07")) series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07"))
req = recent_id_id_ok.calls[0].request req = recent_id_id_ok.calls[0].request
assert req.params["id"] == "1" assert req.params["id"] == "1"
assert req.params["convert_id"] == "2782" assert req.params["convertId"] == "2782"
assert (series.base, series.quote) == ("BTC", "AUD") assert (series.base, series.quote) == ("BTC", "AUD")
assert len(series.prices) == 7 assert len(series.prices) == 7
def test_fetch_known_pair_id_sym(src, type, recent_id_sym_ok): def test_fetch_known_pair_id_sym(src, type, recent_id_id_ok, crypto_ok):
series = src.fetch(Series("ID=1", "AUD", type, "2021-01-01", "2021-01-07")) series = src.fetch(Series("ID=1", "AUD", type, "2021-01-01", "2021-01-07"))
req = recent_id_sym_ok.calls[0].request req = recent_id_id_ok.calls[1].request
assert req.params["id"] == "1" assert req.params["id"] == "1"
assert req.params["convert"] == "AUD" assert req.params["convertId"] == "2782"
assert (series.base, series.quote) == ("BTC", "AUD") assert (series.base, series.quote) == ("BTC", "AUD")
assert len(series.prices) == 7 assert len(series.prices) == 7
def test_fetch_known_pair_sym_id(src, type, recent_sym_id_ok, crypto_ok, fiat_ok): def test_fetch_known_pair_sym_id(src, type, recent_id_id_ok, crypto_ok):
series = src.fetch(Series("BTC", "ID=2782", type, "2021-01-01", "2021-01-07")) series = src.fetch(Series("BTC", "ID=2782", type, "2021-01-01", "2021-01-07"))
req = recent_sym_id_ok.calls[0].request req = recent_id_id_ok.calls[1].request
assert req.params["symbol"] == "BTC" assert req.params["id"] == "1"
assert req.params["convert_id"] == "2782" assert req.params["convertId"] == "2782"
assert (series.base, series.quote) == ("BTC", "AUD") assert (series.base, series.quote) == ("BTC", "AUD")
assert len(series.prices) == 7 assert len(series.prices) == 7
def test_fetch_known_pair_sym_sym(src, type, recent_sym_sym_ok): def test_fetch_known_pair_sym_sym(src, type, recent_id_id_ok, crypto_ok):
series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07")) series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
req = recent_sym_sym_ok.calls[0].request req = recent_id_id_ok.calls[1].request
assert req.params["symbol"] == "BTC" assert req.params["id"] == "1"
assert req.params["convert"] == "AUD" assert req.params["convertId"] == "2782"
assert len(series.prices) == 7 assert len(series.prices) == 7
def test_fetch_requests_and_receives_correct_times( def test_fetch_requests_and_receives_correct_times(
src, type, recent_id_id_ok, crypto_ok, fiat_ok src, type, recent_id_id_ok, crypto_ok
): ):
series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07")) series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07"))
req = recent_id_id_ok.calls[0].request req = recent_id_id_ok.calls[0].request
assert req.params["time_start"] == str(timestamp("2020-12-31")) # back one period assert req.params["timeStart"] == str(timestamp("2020-12-31")) # back one period
assert req.params["time_end"] == str(timestamp("2021-01-07")) assert req.params["timeEnd"] == str(timestamp("2021-01-07"))
assert series.prices[0] == Price("2021-01-01", Decimal("37914.350602379853")) assert series.prices[0] == Price("2021-01-01", Decimal("37914.35060237985"))
assert series.prices[-1] == Price("2021-01-07", Decimal("49370.064689585612")) assert series.prices[-1] == Price("2021-01-07", Decimal("49369.66288590665"))
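The timestamp() helper used in these assertions is defined elsewhere in the test module; a plausible sketch, assuming dates are taken as midnight UTC, together with the one-period lookback the camelCase parameters encode:

    from datetime import date, datetime, timedelta, timezone

    def timestamp(day: str) -> int:
        # Assumed behaviour: midnight UTC of a YYYY-MM-DD date as a unix timestamp.
        return int(datetime.strptime(day, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp())

    start, end = date(2021, 1, 1), date(2021, 1, 7)
    params = {
        "timeStart": str(timestamp(str(start - timedelta(days=1)))),  # 2020-12-31, back one period
        "timeEnd": str(timestamp(str(end))),                          # 2021-01-07
    }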
def test_fetch_requests_logged(src, type, recent_sym_sym_ok, caplog): def test_fetch_requests_logged(src, type, crypto_ok, recent_id_id_ok, caplog):
with caplog.at_level(logging.DEBUG): with caplog.at_level(logging.DEBUG):
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
assert any( assert any(
@ -262,20 +194,20 @@ def test_fetch_requests_logged(src, type, recent_sym_sym_ok, caplog):
) )
def test_fetch_types_all_available(src, recent_sym_sym_ok): def test_fetch_types_all_available(src, crypto_ok, recent_id_id_ok):
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07")) mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07"))
opn = src.fetch(Series("BTC", "AUD", "open", "2021-01-01", "2021-01-07")) opn = src.fetch(Series("BTC", "AUD", "open", "2021-01-01", "2021-01-07"))
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07")) hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07"))
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07")) low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07"))
cls = src.fetch(Series("BTC", "AUD", "close", "2021-01-01", "2021-01-07")) cls = src.fetch(Series("BTC", "AUD", "close", "2021-01-01", "2021-01-07"))
assert mid.prices[0].amount == Decimal("37914.350602379853") assert mid.prices[0].amount == Decimal("37914.35060237985")
assert opn.prices[0].amount == Decimal("37658.83948707033") assert opn.prices[0].amount == Decimal("37658.1146368474")
assert hgh.prices[0].amount == Decimal("38417.9137031205") assert hgh.prices[0].amount == Decimal("38417.9137031205")
assert low.prices[0].amount == Decimal("37410.787501639206") assert low.prices[0].amount == Decimal("37410.7875016392")
assert cls.prices[0].amount == Decimal("38181.99133300758") assert cls.prices[0].amount == Decimal("38181.9913330076")
def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_sym_sym_ok): def test_fetch_type_mid_is_mean_of_low_and_high(src, crypto_ok, recent_id_id_ok):
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07")).prices mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07")).prices
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07")).prices low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07")).prices
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07")).prices hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07")).prices
@ -287,80 +219,24 @@ def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_sym_sym_ok):
) )
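The mid values asserted above follow directly from the high and low figures in the updated fixture; a quick Decimal check:

    from decimal import Decimal

    high = Decimal("38417.9137031205")
    low = Decimal("37410.7875016392")
    assert (high + low) / 2 == Decimal("37914.35060237985")  # the asserted 2021-01-01 mid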
def test_fetch_long_hist_from_start(src, type, long_sym_sym_ok): def test_fetch_empty(src, type, crypto_ok, requests_mock):
series = src.fetch(Series("BTC", "AUD", type, src.start(), "2021-01-07"))
assert series.prices[0] == Price("2013-04-28", Decimal("130.45956234123247"))
assert series.prices[-1] == Price("2021-01-07", Decimal("49370.064689585612"))
assert len(series.prices) > 13
def test_fetch_from_before_start(src, type, requests_mock):
requests_mock.add(
responses.GET,
fetch_url,
status=400,
body="""{ "status": { "error_code": 400, "error_message":
"\\"time_start\\" must be a valid ISO 8601 timestamp or unix time value",
} }""",
)
with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("BTC", "AUD", type, "2001-09-10", "2001-10-01"))
assert "start date can't preceed" in str(e.value)
def test_fetch_to_future(src, type, recent_sym_sym_ok):
series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2100-01-01"))
assert len(series.prices) > 0
def test_fetch_in_future(src, type, requests_mock):
requests_mock.add(
responses.GET,
fetch_url,
status=400,
body="""{
"status": {
"error_code": 400,
"error_message": "\\"time_start\\" must be older than \\"time_end\\"."
}
}""",
)
with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("BTC", "AUD", type, "2030-01-01", "2030-01-07"))
assert "start date must be in the past" in str(e.value)
def test_fetch_reversed_dates(src, type, requests_mock):
requests_mock.add(
responses.GET,
fetch_url,
status=400,
body="""{
"status": {
"error_code": 400,
"error_message": "\\"time_start\\" must be older than \\"time_end\\"."
}
}""",
)
with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("BTC", "AUD", type, "2021-01-07", "2021-01-01"))
assert "start date must preceed or match the end" in str(e.value)
def test_fetch_empty(src, type, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
fetch_url, fetch_url,
body="""{ body="""{
"status": {
"error_code": 0,
"error_message": null
},
"data": { "data": {
"id": 1, "id": 1,
"name": "Bitcoin", "name": "Bitcoin",
"symbol": "BTC", "symbol": "BTC",
"timeEnd": "1228348799",
"quotes": [] "quotes": []
},
"status": {
"timestamp": "2024-08-03T09:31:52.719Z",
"error_code": "0",
"error_message": "SUCCESS",
"elapsed": "14",
"credit_count": 0
} }
}""", }""",
) )
@ -368,63 +244,36 @@ def test_fetch_empty(src, type, requests_mock):
assert len(series.prices) == 0 assert len(series.prices) == 0
def test_fetch_bad_base_sym(src, type, requests_mock): def test_fetch_bad_base_sym(src, type, crypto_ok):
requests_mock.add(responses.GET, fetch_url, body='{"data":{}}') with pytest.raises(exceptions.InvalidPair) as e:
with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("NOTABASE", "USD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("NOTABASE", "USD", type, "2021-01-01", "2021-01-07"))
assert "quote currency symbol can't be found" in str(e.value) assert "Invalid symbol 'NOTABASE'" in str(e.value)
assert "other reasons" in str(e.value)
def test_fetch_bad_quote_sym(src, type, requests_mock): def test_fetch_bad_quote_sym(src, type, crypto_ok):
requests_mock.add(
responses.GET,
fetch_url,
status=400,
body="""{
"status": {
"error_code": 400,
"error_message": "Invalid value for \\"convert\\": \\"NOTAQUOTE\\""
}
}""",
)
with pytest.raises(exceptions.InvalidPair) as e: with pytest.raises(exceptions.InvalidPair) as e:
src.fetch(Series("BTC", "NOTAQUOTE", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "NOTAQUOTE", type, "2021-01-01", "2021-01-07"))
assert "Bad quote symbol" in str(e.value) assert "Invalid symbol 'NOTAQUOTE'" in str(e.value)
def test_fetch_bad_base_id(src, type, requests_mock): def test_fetch_bad_response(src, type, crypto_ok, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
fetch_url, fetch_url,
status=400, status=200,
body="""{ body="""{
"status": { "status": {
"error_code": 400, "timestamp": "2024-08-03T09:42:43.699Z",
"error_message": "No items found." "error_code": "500",
"error_message": "The system is busy, please try again later!",
"elapsed": "0",
"credit_count": 0
} }
}""", }""",
) )
with pytest.raises(exceptions.InvalidPair) as e: with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("ID=20000", "USD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("ID=987654321", "USD", type, "2021-01-01", "2021-01-07"))
assert "Bad base ID" in str(e.value) assert "general error" in str(e.value)
def test_fetch_bad_quote_id(src, type, requests_mock):
requests_mock.add(
responses.GET,
fetch_url,
status=400,
body="""{
"status": {
"error_code": 400,
"error_message": "Invalid value for \\"convert_id\\": \\"20000\\""
}
}""",
)
with pytest.raises(exceptions.InvalidPair) as e:
src.fetch(Series("BTC", "ID=20000", type, "2021-01-01", "2021-01-07"))
assert "Bad quote ID" in str(e.value)
def test_fetch_no_quote(src, type): def test_fetch_no_quote(src, type):
@ -432,7 +281,7 @@ def test_fetch_no_quote(src, type):
src.fetch(Series("BTC", "", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "", type, "2021-01-01", "2021-01-07"))
def test_fetch_network_issue(src, type, requests_mock): def test_fetch_network_issue(src, type, crypto_ok, requests_mock):
body = requests.exceptions.ConnectionError("Network issue") body = requests.exceptions.ConnectionError("Network issue")
requests_mock.add(responses.GET, fetch_url, body=body) requests_mock.add(responses.GET, fetch_url, body=body)
with pytest.raises(exceptions.RequestError) as e: with pytest.raises(exceptions.RequestError) as e:
@ -440,21 +289,21 @@ def test_fetch_network_issue(src, type, requests_mock):
assert "Network issue" in str(e.value) assert "Network issue" in str(e.value)
def test_fetch_bad_status(src, type, requests_mock): def test_fetch_bad_status(src, type, crypto_ok, requests_mock):
requests_mock.add(responses.GET, fetch_url, status=500, body="Some other reason") requests_mock.add(responses.GET, fetch_url, status=500, body="Some other reason")
with pytest.raises(exceptions.BadResponse) as e: with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
assert "Internal Server Error" in str(e.value) assert "Internal Server Error" in str(e.value)
def test_fetch_parsing_error(src, type, requests_mock): def test_fetch_parsing_error(src, type, crypto_ok, requests_mock):
requests_mock.add(responses.GET, fetch_url, body="NOT JSON") requests_mock.add(responses.GET, fetch_url, body="NOT JSON")
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
assert "while parsing data" in str(e.value) assert "while parsing data" in str(e.value)
def test_fetch_unexpected_json(src, type, requests_mock): def test_fetch_unexpected_json(src, type, crypto_ok, requests_mock):
requests_mock.add(responses.GET, fetch_url, body='{"notdata": []}') requests_mock.add(responses.GET, fetch_url, body='{"notdata": []}')
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07")) src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))


@ -1,30 +0,0 @@
{
"status": {
"timestamp": "2021-07-16T10:08:13.272Z",
"error_code": 0,
"error_message": null,
"elapsed": 1,
"credit_count": 0,
"notice": null
},
"data": [
{
"id": 2781,
"name": "United States Dollar",
"sign": "$",
"symbol": "USD"
},
{
"id": 2782,
"name": "Australian Dollar",
"sign": "$",
"symbol": "AUD"
},
{
"id": 3575,
"name": "Gold Troy Ounce",
"symbol": "",
"code": "XAU"
}
]
}
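This deleted fixture is the old fiat listing (USD 2781, AUD 2782, and gold via its XAU code) that the removed fiat_ok fixture served; the updated tests no longer need it. For reference, the kind of symbol-to-ID lookup such a listing supports (illustrative only):

    def id_by_symbol(listing):
        # Fall back to "code" for entries like Gold Troy Ounce, whose "symbol" is empty.
        return {(row.get("symbol") or row.get("code")): row["id"] for row in listing}

    # id_by_symbol(data)["AUD"] == 2782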


@ -1,255 +0,0 @@
{
"status": {
"timestamp": "2021-07-17T16:16:11.926Z",
"error_code": 0,
"error_message": null,
"elapsed": 2262,
"credit_count": 0,
"notice": null
},
"data": {
"id": 1,
"name": "Bitcoin",
"symbol": "BTC",
"quotes": [
{
"time_open": "2013-04-28T00:00:00.000Z",
"time_close": "2013-04-28T23:59:59.999Z",
"time_high": "2013-04-28T18:50:02.000Z",
"time_low": "2013-04-28T20:15:02.000Z",
"quote": {
"AUD": {
"open": null,
"high": 132.39216797540558,
"low": 128.52695670705936,
"close": 130.52908647526473,
"volume": 0,
"market_cap": 1447740447.626921,
"timestamp": "2013-04-28T23:59:00.000Z"
}
}
},
{
"time_open": "2013-04-29T00:00:00.000Z",
"time_close": "2013-04-29T23:59:59.999Z",
"time_high": "2013-04-29T13:15:01.000Z",
"time_low": "2013-04-29T05:20:01.000Z",
"quote": {
"AUD": {
"open": 130.75666236543535,
"high": 142.67970067891736,
"low": 129.9456943366951,
"close": 139.77370978254794,
"volume": 0,
"market_cap": 1550883729.329852,
"timestamp": "2013-04-29T23:59:00.000Z"
}
}
},
{
"time_open": "2013-04-30T00:00:00.000Z",
"time_close": "2013-04-30T23:59:59.999Z",
"time_high": "2013-04-30T08:25:02.000Z",
"time_low": "2013-04-30T18:55:01.000Z",
"quote": {
"AUD": {
"open": 139.2515230635335,
"high": 141.93391873626476,
"low": 129.37940647790543,
"close": 134.06635802469137,
"volume": 0,
"market_cap": 1488052782.6003087,
"timestamp": "2013-04-30T23:59:00.000Z"
}
}
},
{
"time_open": "2013-05-01T00:00:00.000Z",
"time_close": "2013-05-01T23:59:59.999Z",
"time_high": "2013-05-01T00:15:01.000Z",
"time_low": "2013-05-01T19:55:01.000Z",
"quote": {
"AUD": {
"open": 134.06635802469137,
"high": 134.88573849160971,
"low": 104.93911468163968,
"close": 113.79243056489595,
"volume": 0,
"market_cap": 1263451603.6864119,
"timestamp": "2013-05-01T23:59:00.000Z"
}
}
},
{
"time_open": "2013-05-02T00:00:00.000Z",
"time_close": "2013-05-02T23:59:59.999Z",
"time_high": "2013-05-02T14:25:01.000Z",
"time_low": "2013-05-02T14:30:02.000Z",
"quote": {
"AUD": {
"open": 113.19910247390133,
"high": 122.60835462135991,
"low": 90.08385249759387,
"close": 102.63388848353591,
"volume": 0,
"market_cap": 1139905858.2089553,
"timestamp": "2013-05-02T23:59:00.000Z"
}
}
},
{
"time_open": "2013-05-03T00:00:00.000Z",
"time_close": "2013-05-03T23:59:59.999Z",
"time_high": "2013-05-03T05:30:02.000Z",
"time_low": "2013-05-03T03:05:01.000Z",
"quote": {
"AUD": {
"open": 103.64842454394694,
"high": 105.43929629649027,
"low": 77.03544845551335,
"close": 94.77409346519293,
"volume": 0,
"market_cap": 1052933070.3412836,
"timestamp": "2013-05-03T23:59:00.000Z"
}
}
},
{
"time_open": "2013-05-04T00:00:00.000Z",
"time_close": "2013-05-04T23:59:59.999Z",
"time_high": "2013-05-04T07:15:01.000Z",
"time_low": "2013-05-04T06:50:01.000Z",
"quote": {
"AUD": {
"open": 95.11343656595025,
"high": 111.49893348846227,
"low": 89.68392476245879,
"close": 109.07504363001745,
"volume": 0,
"market_cap": 1212251854.2757416,
"timestamp": "2013-05-04T23:59:00.000Z"
}
}
},
{
"time_open": "2021-01-01T00:00:00.000Z",
"time_close": "2021-01-01T23:59:59.999Z",
"time_high": "2021-01-01T12:38:43.000Z",
"time_low": "2021-01-01T00:16:43.000Z",
"quote": {
"AUD": {
"open": 37658.83948707033,
"high": 38417.9137031205,
"low": 37410.787501639206,
"close": 38181.99133300758,
"volume": 52943282221.028366,
"market_cap": 709720173049.5383,
"timestamp": "2021-01-01T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-02T00:00:00.000Z",
"time_close": "2021-01-02T23:59:59.999Z",
"time_high": "2021-01-02T19:49:42.000Z",
"time_low": "2021-01-02T00:31:44.000Z",
"quote": {
"AUD": {
"open": 38184.98611600682,
"high": 43096.681197423015,
"low": 37814.17187096531,
"close": 41760.62923079505,
"volume": 88214867181.97835,
"market_cap": 776278147177.8037,
"timestamp": "2021-01-02T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-03T00:00:00.000Z",
"time_close": "2021-01-03T23:59:59.999Z",
"time_high": "2021-01-03T07:47:38.000Z",
"time_low": "2021-01-03T00:20:45.000Z",
"quote": {
"AUD": {
"open": 41763.41015117659,
"high": 44985.93247585023,
"low": 41663.204350601605,
"close": 42511.10646879765,
"volume": 102011582370.28117,
"market_cap": 790270288834.0249,
"timestamp": "2021-01-03T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-04T00:00:00.000Z",
"time_close": "2021-01-04T23:59:59.999Z",
"time_high": "2021-01-04T04:07:42.000Z",
"time_low": "2021-01-04T10:19:42.000Z",
"quote": {
"AUD": {
"open": 42548.61349648768,
"high": 43360.96165147421,
"low": 37133.98436952697,
"close": 41686.38761359174,
"volume": 105824510346.65779,
"market_cap": 774984045201.7122,
"timestamp": "2021-01-04T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-05T00:00:00.000Z",
"time_close": "2021-01-05T23:59:59.999Z",
"time_high": "2021-01-05T22:44:35.000Z",
"time_low": "2021-01-05T06:16:41.000Z",
"quote": {
"AUD": {
"open": 41693.07321807638,
"high": 44403.79487147647,
"low": 39221.81167941294,
"close": 43790.067253370056,
"volume": 87016490203.50436,
"market_cap": 814135603090.2502,
"timestamp": "2021-01-05T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-06T00:00:00.000Z",
"time_close": "2021-01-06T23:59:59.999Z",
"time_high": "2021-01-06T23:57:36.000Z",
"time_low": "2021-01-06T00:25:38.000Z",
"quote": {
"AUD": {
"open": 43817.35864984641,
"high": 47186.65232598287,
"low": 43152.60281764236,
"close": 47115.85365360005,
"volume": 96330948324.8061,
"market_cap": 876019742889.9551,
"timestamp": "2021-01-06T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-07T00:00:00.000Z",
"time_close": "2021-01-07T23:59:59.999Z",
"time_high": "2021-01-07T18:17:42.000Z",
"time_low": "2021-01-07T08:25:51.000Z",
"quote": {
"AUD": {
"open": 47128.02139328098,
"high": 51833.478207775144,
"low": 46906.65117139608,
"close": 50686.90986207153,
"volume": 109124136558.20264,
"market_cap": 942469208700.134,
"timestamp": "2021-01-07T23:59:06.000Z"
}
}
}
]
}
}


@ -1,136 +0,0 @@
{
"status": {
"timestamp": "2021-07-16T10:42:32.013Z",
"error_code": 0,
"error_message": null,
"elapsed": 20,
"credit_count": 0,
"notice": null
},
"data": {
"id": 1,
"name": "Bitcoin",
"symbol": "BTC",
"quotes": [
{
"time_open": "2021-01-01T00:00:00.000Z",
"time_close": "2021-01-01T23:59:59.999Z",
"time_high": "2021-01-01T12:38:43.000Z",
"time_low": "2021-01-01T00:16:43.000Z",
"quote": {
"AUD": {
"open": 37658.83948707033,
"high": 38417.9137031205,
"low": 37410.787501639206,
"close": 38181.99133300758,
"volume": 52943282221.028366,
"market_cap": 709720173049.5383,
"timestamp": "2021-01-01T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-02T00:00:00.000Z",
"time_close": "2021-01-02T23:59:59.999Z",
"time_high": "2021-01-02T19:49:42.000Z",
"time_low": "2021-01-02T00:31:44.000Z",
"quote": {
"AUD": {
"open": 38184.98611600682,
"high": 43096.681197423015,
"low": 37814.17187096531,
"close": 41760.62923079505,
"volume": 88214867181.97835,
"market_cap": 776278147177.8037,
"timestamp": "2021-01-02T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-03T00:00:00.000Z",
"time_close": "2021-01-03T23:59:59.999Z",
"time_high": "2021-01-03T07:47:38.000Z",
"time_low": "2021-01-03T00:20:45.000Z",
"quote": {
"AUD": {
"open": 41763.41015117659,
"high": 44985.93247585023,
"low": 41663.204350601605,
"close": 42511.10646879765,
"volume": 102011582370.28117,
"market_cap": 790270288834.0249,
"timestamp": "2021-01-03T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-04T00:00:00.000Z",
"time_close": "2021-01-04T23:59:59.999Z",
"time_high": "2021-01-04T04:07:42.000Z",
"time_low": "2021-01-04T10:19:42.000Z",
"quote": {
"AUD": {
"open": 42548.61349648768,
"high": 43360.96165147421,
"low": 37133.98436952697,
"close": 41686.38761359174,
"volume": 105824510346.65779,
"market_cap": 774984045201.7122,
"timestamp": "2021-01-04T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-05T00:00:00.000Z",
"time_close": "2021-01-05T23:59:59.999Z",
"time_high": "2021-01-05T22:44:35.000Z",
"time_low": "2021-01-05T06:16:41.000Z",
"quote": {
"AUD": {
"open": 41693.07321807638,
"high": 44403.79487147647,
"low": 39221.81167941294,
"close": 43790.067253370056,
"volume": 87016490203.50436,
"market_cap": 814135603090.2502,
"timestamp": "2021-01-05T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-06T00:00:00.000Z",
"time_close": "2021-01-06T23:59:59.999Z",
"time_high": "2021-01-06T23:57:36.000Z",
"time_low": "2021-01-06T00:25:38.000Z",
"quote": {
"AUD": {
"open": 43817.35864984641,
"high": 47186.65232598287,
"low": 43152.60281764236,
"close": 47115.85365360005,
"volume": 96330948324.8061,
"market_cap": 876019742889.9551,
"timestamp": "2021-01-06T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-07T00:00:00.000Z",
"time_close": "2021-01-07T23:59:59.999Z",
"time_high": "2021-01-07T18:17:42.000Z",
"time_low": "2021-01-07T08:25:51.000Z",
"quote": {
"AUD": {
"open": 47128.02139328098,
"high": 51833.478207775144,
"low": 46906.65117139608,
"close": 50686.90986207153,
"volume": 109124136558.20264,
"market_cap": 942469208700.134,
"timestamp": "2021-01-07T23:59:06.000Z"
}
}
}
]
}
}


@ -1,136 +0,0 @@
{
"status": {
"timestamp": "2021-07-16T10:42:27.169Z",
"error_code": 0,
"error_message": null,
"elapsed": 19,
"credit_count": 0,
"notice": null
},
"data": {
"id": 1,
"name": "Bitcoin",
"symbol": "BTC",
"quotes": [
{
"time_open": "2021-01-01T00:00:00.000Z",
"time_close": "2021-01-01T23:59:59.999Z",
"time_high": "2021-01-01T12:38:43.000Z",
"time_low": "2021-01-01T00:16:43.000Z",
"quote": {
"2782": {
"open": 37658.83948707033,
"high": 38417.9137031205,
"low": 37410.787501639206,
"close": 38181.99133300758,
"volume": 52943282221.028366,
"market_cap": 709720173049.5383,
"timestamp": "2021-01-01T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-02T00:00:00.000Z",
"time_close": "2021-01-02T23:59:59.999Z",
"time_high": "2021-01-02T19:49:42.000Z",
"time_low": "2021-01-02T00:31:44.000Z",
"quote": {
"2782": {
"open": 38184.98611600682,
"high": 43096.681197423015,
"low": 37814.17187096531,
"close": 41760.62923079505,
"volume": 88214867181.97835,
"market_cap": 776278147177.8037,
"timestamp": "2021-01-02T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-03T00:00:00.000Z",
"time_close": "2021-01-03T23:59:59.999Z",
"time_high": "2021-01-03T07:47:38.000Z",
"time_low": "2021-01-03T00:20:45.000Z",
"quote": {
"2782": {
"open": 41763.41015117659,
"high": 44985.93247585023,
"low": 41663.204350601605,
"close": 42511.10646879765,
"volume": 102011582370.28117,
"market_cap": 790270288834.0249,
"timestamp": "2021-01-03T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-04T00:00:00.000Z",
"time_close": "2021-01-04T23:59:59.999Z",
"time_high": "2021-01-04T04:07:42.000Z",
"time_low": "2021-01-04T10:19:42.000Z",
"quote": {
"2782": {
"open": 42548.61349648768,
"high": 43360.96165147421,
"low": 37133.98436952697,
"close": 41686.38761359174,
"volume": 105824510346.65779,
"market_cap": 774984045201.7122,
"timestamp": "2021-01-04T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-05T00:00:00.000Z",
"time_close": "2021-01-05T23:59:59.999Z",
"time_high": "2021-01-05T22:44:35.000Z",
"time_low": "2021-01-05T06:16:41.000Z",
"quote": {
"2782": {
"open": 41693.07321807638,
"high": 44403.79487147647,
"low": 39221.81167941294,
"close": 43790.067253370056,
"volume": 87016490203.50436,
"market_cap": 814135603090.2502,
"timestamp": "2021-01-05T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-06T00:00:00.000Z",
"time_close": "2021-01-06T23:59:59.999Z",
"time_high": "2021-01-06T23:57:36.000Z",
"time_low": "2021-01-06T00:25:38.000Z",
"quote": {
"2782": {
"open": 43817.35864984641,
"high": 47186.65232598287,
"low": 43152.60281764236,
"close": 47115.85365360005,
"volume": 96330948324.8061,
"market_cap": 876019742889.9551,
"timestamp": "2021-01-06T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-07T00:00:00.000Z",
"time_close": "2021-01-07T23:59:59.999Z",
"time_high": "2021-01-07T18:17:42.000Z",
"time_low": "2021-01-07T08:25:51.000Z",
"quote": {
"2782": {
"open": 47128.02139328098,
"high": 51833.478207775144,
"low": 46906.65117139608,
"close": 50686.90986207153,
"volume": 109124136558.20264,
"market_cap": 942469208700.134,
"timestamp": "2021-01-07T23:59:06.000Z"
}
}
}
]
}
}


@ -1,136 +0,0 @@
{
"status": {
"timestamp": "2021-07-16T10:42:24.612Z",
"error_code": 0,
"error_message": null,
"elapsed": 57,
"credit_count": 0,
"notice": null
},
"data": {
"id": 1,
"name": "Bitcoin",
"symbol": "BTC",
"quotes": [
{
"time_open": "2021-01-01T00:00:00.000Z",
"time_close": "2021-01-01T23:59:59.999Z",
"time_high": "2021-01-01T12:38:43.000Z",
"time_low": "2021-01-01T00:16:43.000Z",
"quote": {
"AUD": {
"open": 37658.83948707033,
"high": 38417.9137031205,
"low": 37410.787501639206,
"close": 38181.99133300758,
"volume": 52943282221.028366,
"market_cap": 709720173049.5383,
"timestamp": "2021-01-01T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-02T00:00:00.000Z",
"time_close": "2021-01-02T23:59:59.999Z",
"time_high": "2021-01-02T19:49:42.000Z",
"time_low": "2021-01-02T00:31:44.000Z",
"quote": {
"AUD": {
"open": 38184.98611600682,
"high": 43096.681197423015,
"low": 37814.17187096531,
"close": 41760.62923079505,
"volume": 88214867181.97835,
"market_cap": 776278147177.8037,
"timestamp": "2021-01-02T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-03T00:00:00.000Z",
"time_close": "2021-01-03T23:59:59.999Z",
"time_high": "2021-01-03T07:47:38.000Z",
"time_low": "2021-01-03T00:20:45.000Z",
"quote": {
"AUD": {
"open": 41763.41015117659,
"high": 44985.93247585023,
"low": 41663.204350601605,
"close": 42511.10646879765,
"volume": 102011582370.28117,
"market_cap": 790270288834.0249,
"timestamp": "2021-01-03T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-04T00:00:00.000Z",
"time_close": "2021-01-04T23:59:59.999Z",
"time_high": "2021-01-04T04:07:42.000Z",
"time_low": "2021-01-04T10:19:42.000Z",
"quote": {
"AUD": {
"open": 42548.61349648768,
"high": 43360.96165147421,
"low": 37133.98436952697,
"close": 41686.38761359174,
"volume": 105824510346.65779,
"market_cap": 774984045201.7122,
"timestamp": "2021-01-04T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-05T00:00:00.000Z",
"time_close": "2021-01-05T23:59:59.999Z",
"time_high": "2021-01-05T22:44:35.000Z",
"time_low": "2021-01-05T06:16:41.000Z",
"quote": {
"AUD": {
"open": 41693.07321807638,
"high": 44403.79487147647,
"low": 39221.81167941294,
"close": 43790.067253370056,
"volume": 87016490203.50436,
"market_cap": 814135603090.2502,
"timestamp": "2021-01-05T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-06T00:00:00.000Z",
"time_close": "2021-01-06T23:59:59.999Z",
"time_high": "2021-01-06T23:57:36.000Z",
"time_low": "2021-01-06T00:25:38.000Z",
"quote": {
"AUD": {
"open": 43817.35864984641,
"high": 47186.65232598287,
"low": 43152.60281764236,
"close": 47115.85365360005,
"volume": 96330948324.8061,
"market_cap": 876019742889.9551,
"timestamp": "2021-01-06T23:59:06.000Z"
}
}
},
{
"time_open": "2021-01-07T00:00:00.000Z",
"time_close": "2021-01-07T23:59:59.999Z",
"time_high": "2021-01-07T18:17:42.000Z",
"time_low": "2021-01-07T08:25:51.000Z",
"quote": {
"AUD": {
"open": 47128.02139328098,
"high": 51833.478207775144,
"low": 46906.65117139608,
"close": 50686.90986207153,
"volume": 109124136558.20264,
"market_cap": 942469208700.134,
"timestamp": "2021-01-07T23:59:06.000Z"
}
}
}
]
}
}


@ -1,136 +1,129 @@
{ {
"status": {
"timestamp": "2021-07-16T10:42:21.065Z",
"error_code": 0,
"error_message": null,
"elapsed": 17,
"credit_count": 0,
"notice": null
},
"data": { "data": {
"id": 1, "id": 1,
"name": "Bitcoin", "name": "Bitcoin",
"symbol": "BTC", "symbol": "BTC",
"timeEnd": "1575503999",
"quotes": [ "quotes": [
{ {
"time_open": "2021-01-01T00:00:00.000Z", "timeOpen": "2021-01-01T00:00:00.000Z",
"time_close": "2021-01-01T23:59:59.999Z", "timeClose": "2021-01-01T23:59:59.999Z",
"time_high": "2021-01-01T12:38:43.000Z", "timeHigh": "2021-01-01T12:38:43.000Z",
"time_low": "2021-01-01T00:16:43.000Z", "timeLow": "2021-01-01T00:16:43.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 37658.83948707033, "open": 37658.1146368474,
"high": 38417.9137031205, "high": 38417.9137031205,
"low": 37410.787501639206, "low": 37410.7875016392,
"close": 38181.99133300758, "close": 38181.9913330076,
"volume": 52943282221.028366, "volume": 52901492931.8344367080,
"market_cap": 709720173049.5383, "marketCap": 709159975413.2388897949,
"timestamp": "2021-01-01T23:59:06.000Z" "timestamp": "2021-01-01T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-02T00:00:00.000Z", "timeOpen": "2021-01-02T00:00:00.000Z",
"time_close": "2021-01-02T23:59:59.999Z", "timeClose": "2021-01-02T23:59:59.999Z",
"time_high": "2021-01-02T19:49:42.000Z", "timeHigh": "2021-01-02T19:49:42.000Z",
"time_low": "2021-01-02T00:31:44.000Z", "timeLow": "2021-01-02T00:31:44.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 38184.98611600682, "open": 38184.9861160068,
"high": 43096.681197423015, "high": 43096.6811974230,
"low": 37814.17187096531, "low": 37814.1718709653,
"close": 41760.62923079505, "close": 41760.6292307951,
"volume": 88214867181.97835, "volume": 88214867181.9830439141,
"market_cap": 776278147177.8037, "marketCap": 776278147177.8037261338,
"timestamp": "2021-01-02T23:59:06.000Z" "timestamp": "2021-01-02T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-03T00:00:00.000Z", "timeOpen": "2021-01-03T00:00:00.000Z",
"time_close": "2021-01-03T23:59:59.999Z", "timeClose": "2021-01-03T23:59:59.999Z",
"time_high": "2021-01-03T07:47:38.000Z", "timeHigh": "2021-01-03T07:47:38.000Z",
"time_low": "2021-01-03T00:20:45.000Z", "timeLow": "2021-01-03T00:20:45.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 41763.41015117659, "open": 41763.4101511766,
"high": 44985.93247585023, "high": 44985.9324758502,
"low": 41663.204350601605, "low": 41663.2043506016,
"close": 42511.10646879765, "close": 42534.0538859236,
"volume": 102011582370.28117, "volume": 102253005977.1115650988,
"market_cap": 790270288834.0249, "marketCap": 792140565709.1701340036,
"timestamp": "2021-01-03T23:59:06.000Z" "timestamp": "2021-01-03T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-04T00:00:00.000Z", "timeOpen": "2021-01-04T00:00:00.000Z",
"time_close": "2021-01-04T23:59:59.999Z", "timeClose": "2021-01-04T23:59:59.999Z",
"time_high": "2021-01-04T04:07:42.000Z", "timeHigh": "2021-01-04T04:07:42.000Z",
"time_low": "2021-01-04T10:19:42.000Z", "timeLow": "2021-01-04T10:19:42.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 42548.61349648768, "open": 42548.6134964877,
"high": 43360.96165147421, "high": 43347.7527651400,
"low": 37133.98436952697, "low": 37111.8678479690,
"close": 41686.38761359174, "close": 41707.4890765162,
"volume": 105824510346.65779, "volume": 105251252720.3013091567,
"market_cap": 774984045201.7122, "marketCap": 770785910830.3801120744,
"timestamp": "2021-01-04T23:59:06.000Z" "timestamp": "2021-01-04T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-05T00:00:00.000Z", "timeOpen": "2021-01-05T00:00:00.000Z",
"time_close": "2021-01-05T23:59:59.999Z", "timeClose": "2021-01-05T23:59:59.999Z",
"time_high": "2021-01-05T22:44:35.000Z", "timeHigh": "2021-01-05T22:44:35.000Z",
"time_low": "2021-01-05T06:16:41.000Z", "timeLow": "2021-01-05T06:16:41.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 41693.07321807638, "open": 41693.0732180764,
"high": 44403.79487147647, "high": 44406.6531914952,
"low": 39221.81167941294, "low": 39220.9654861842,
"close": 43790.067253370056, "close": 43777.4560620835,
"volume": 87016490203.50436, "volume": 88071174132.6445648582,
"market_cap": 814135603090.2502, "marketCap": 824003338903.4613958343,
"timestamp": "2021-01-05T23:59:06.000Z" "timestamp": "2021-01-05T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-06T00:00:00.000Z", "timeOpen": "2021-01-06T00:00:00.000Z",
"time_close": "2021-01-06T23:59:59.999Z", "timeClose": "2021-01-06T23:59:59.999Z",
"time_high": "2021-01-06T23:57:36.000Z", "timeHigh": "2021-01-06T23:57:36.000Z",
"time_low": "2021-01-06T00:25:38.000Z", "timeLow": "2021-01-06T00:25:38.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 43817.35864984641, "open": 43798.3790529373,
"high": 47186.65232598287, "high": 47185.7303335186,
"low": 43152.60281764236, "low": 43152.6028176424,
"close": 47115.85365360005, "close": 47114.9330444897,
"volume": 96330948324.8061, "volume": 96948095813.7503737302,
"market_cap": 876019742889.9551, "marketCap": 881631993096.0701475336,
"timestamp": "2021-01-06T23:59:06.000Z" "timestamp": "2021-01-06T23:59:59.999Z"
}
} }
}, },
{ {
"time_open": "2021-01-07T00:00:00.000Z", "timeOpen": "2021-01-07T00:00:00.000Z",
"time_close": "2021-01-07T23:59:59.999Z", "timeClose": "2021-01-07T23:59:59.999Z",
"time_high": "2021-01-07T18:17:42.000Z", "timeHigh": "2021-01-07T18:17:42.000Z",
"time_low": "2021-01-07T08:25:51.000Z", "timeLow": "2021-01-07T08:25:51.000Z",
"quote": { "quote": {
"2782": { "name": "2782",
"open": 47128.02139328098, "open": 47128.0213932810,
"high": 51833.478207775144, "high": 51832.6746004172,
"low": 46906.65117139608, "low": 46906.6511713961,
"close": 50686.90986207153, "close": 50660.9643451606,
"volume": 109124136558.20264, "volume": 108451040396.2660095877,
"market_cap": 942469208700.134, "marketCap": 936655898949.2177196744,
"timestamp": "2021-01-07T23:59:06.000Z" "timestamp": "2021-01-07T23:59:59.999Z"
}
} }
} }
] ]
},
"status": {
"timestamp": "2024-08-02T18:23:21.586Z",
"error_code": "0",
"error_message": "SUCCESS",
"elapsed": "212",
"credit_count": 0
} }
} }
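The reshaped fixture shows the main structural change in the response: camelCase time fields, and a quote that is now a flat object carrying the convert ID in a "name" field instead of being nested under a per-currency key. A minimal sketch of parsing the new shape (the real source module may handle fields differently):

    from decimal import Decimal

    def prices_from_quotes(data, price_type="close"):
        # data is the "data" object of a new-style response like the one above.
        prices = []
        for item in data["quotes"]:
            quote = item["quote"]          # flat object; no {"2782": {...}} nesting
            day = item["timeOpen"][:10]    # e.g. "2021-01-01"
            if price_type == "mid":
                amount = (Decimal(str(quote["high"])) + Decimal(str(quote["low"]))) / 2
            else:
                amount = Decimal(str(quote[price_type]))
            prices.append((day, amount))
        return prices

Using Decimal(str(value)) keeps the literal figures from the fixture, which is consistent with the exact amounts the updated tests assert.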


@ -36,38 +36,28 @@ def requests_mock():
yield mock yield mock
spark_url = "https://query1.finance.yahoo.com/v7/finance/spark" def url(base):
return f"https://query1.finance.yahoo.com/v8/finance/chart/{base}"
def history_url(base):
return f"https://query1.finance.yahoo.com/v7/finance/download/{base}"
@pytest.fixture
def spark_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "tsla-spark.json").read_text()
requests_mock.add(responses.GET, spark_url, body=json, status=200)
yield requests_mock
@pytest.fixture @pytest.fixture
def recent_ok(requests_mock): def recent_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "tsla-recent.csv").read_text() json = (Path(os.path.splitext(__file__)[0]) / "tsla-recent.json").read_text()
requests_mock.add(responses.GET, history_url("TSLA"), body=json, status=200) requests_mock.add(responses.GET, url("TSLA"), body=json, status=200)
yield requests_mock yield requests_mock
@pytest.fixture @pytest.fixture
def long_ok(requests_mock): def long_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "ibm-long-partial.csv").read_text() json = (Path(os.path.splitext(__file__)[0]) / "ibm-long-partial.json").read_text()
requests_mock.add(responses.GET, history_url("IBM"), body=json, status=200) requests_mock.add(responses.GET, url("IBM"), body=json, status=200)
yield requests_mock yield requests_mock
@pytest.fixture @pytest.fixture
def date_with_nulls_ok(requests_mock): def with_null_ok(requests_mock):
json = (Path(os.path.splitext(__file__)[0]) / "ibm-date-with-nulls.csv").read_text() json = (Path(os.path.splitext(__file__)[0]) / "inrx-with-null.json").read_text()
requests_mock.add(responses.GET, history_url("IBM"), body=json, status=200) requests_mock.add(responses.GET, url("INR=X"), body=json, status=200)
yield requests_mock yield requests_mock
@ -105,59 +95,57 @@ def test_symbols(src, caplog):
assert any(["Find the symbol of interest on" in r.message for r in caplog.records]) assert any(["Find the symbol of interest on" in r.message for r in caplog.records])
def test_fetch_known(src, type, spark_ok, recent_ok): def test_fetch_known(src, type, recent_ok):
series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
spark_req = recent_ok.calls[0].request req = recent_ok.calls[0].request
hist_req = recent_ok.calls[1].request assert req.params["events"] == "capitalGain%7Cdiv%7Csplit"
assert spark_req.params["symbols"] == "TSLA" assert req.params["includeAdjustedClose"] == "true"
assert hist_req.params["events"] == "history"
assert hist_req.params["includeAdjustedClose"] == "true"
assert (series.base, series.quote) == ("TSLA", "USD") assert (series.base, series.quote) == ("TSLA", "USD")
assert len(series.prices) == 5 assert len(series.prices) == 5
def test_fetch_requests_and_receives_correct_times(src, type, spark_ok, recent_ok): def test_fetch_requests_and_receives_correct_times(src, type, recent_ok):
series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
hist_req = recent_ok.calls[1].request req = recent_ok.calls[0].request
assert hist_req.params["period1"] == str(timestamp("2021-01-04")) assert req.params["period1"] == str(timestamp("2021-01-04"))
assert hist_req.params["period2"] == str(timestamp("2021-01-09")) # rounded up one assert req.params["period2"] == str(timestamp("2021-01-09")) # rounded up one
assert hist_req.params["interval"] == "1d" assert req.params["interval"] == "1d"
assert series.prices[0] == Price("2021-01-04", Decimal("729.770020")) assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
assert series.prices[-1] == Price("2021-01-08", Decimal("880.020020")) assert series.prices[-1] == Price("2021-01-08", Decimal("293.339996337890625"))
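As in the CoinMarketCap tests, timestamp() is a module-level helper not shown in this diff; the chart request brackets the series with period1 at the start date and period2 one day past the end so the final session is included. A sketch assuming midnight-UTC conversion:

    from datetime import datetime, timedelta, timezone

    def to_unix(day):
        # Assumption: the module's real timestamp() helper may differ.
        return int(datetime.strptime(day, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp())

    end_plus_one = datetime.strptime("2021-01-08", "%Y-%m-%d") + timedelta(days=1)
    params = {
        "period1": str(to_unix("2021-01-04")),
        "period2": str(to_unix(end_plus_one.strftime("%Y-%m-%d"))),  # 2021-01-09, rounded up one
        "interval": "1d",
        "events": "capitalGain|div|split",  # seen URL-encoded above as capitalGain%7Cdiv%7Csplit
        "includeAdjustedClose": "true",
    }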
def test_fetch_ignores_any_extra_row(src, type, spark_ok, recent_ok): def test_fetch_ignores_any_extra_row(src, type, recent_ok):
series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-07")) series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-07"))
assert series.prices[0] == Price("2021-01-04", Decimal("729.770020")) assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
assert series.prices[-1] == Price("2021-01-07", Decimal("816.039978")) assert series.prices[-1] == Price("2021-01-07", Decimal("272.013336181640625"))
def test_fetch_requests_logged(src, type, spark_ok, recent_ok, caplog): def test_fetch_requests_logged(src, type, recent_ok, caplog):
with caplog.at_level(logging.DEBUG): with caplog.at_level(logging.DEBUG):
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
logged_requests = 0 logged_requests = 0
for r in caplog.records: for r in caplog.records:
if r.levelname == "DEBUG" and "curl " in r.message: if r.levelname == "DEBUG" and "curl " in r.message:
logged_requests += 1 logged_requests += 1
assert logged_requests == 2 assert logged_requests == 1
def test_fetch_types_all_available(src, spark_ok, recent_ok): def test_fetch_types_all_available(src, recent_ok):
adj = src.fetch(Series("TSLA", "", "adjclose", "2021-01-04", "2021-01-08")) adj = src.fetch(Series("TSLA", "", "adjclose", "2021-01-04", "2021-01-08"))
opn = src.fetch(Series("TSLA", "", "open", "2021-01-04", "2021-01-08")) opn = src.fetch(Series("TSLA", "", "open", "2021-01-04", "2021-01-08"))
hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08")) hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08"))
low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08")) low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08"))
cls = src.fetch(Series("TSLA", "", "close", "2021-01-04", "2021-01-08")) cls = src.fetch(Series("TSLA", "", "close", "2021-01-04", "2021-01-08"))
mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08")) mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08"))
assert adj.prices[0].amount == Decimal("729.770020") assert adj.prices[0].amount == Decimal("243.2566680908203125")
assert opn.prices[0].amount == Decimal("719.460022") assert opn.prices[0].amount == Decimal("239.82000732421875")
assert hgh.prices[0].amount == Decimal("744.489990") assert hgh.prices[0].amount == Decimal("248.163330078125")
assert low.prices[0].amount == Decimal("717.190002") assert low.prices[0].amount == Decimal("239.0633392333984375")
assert cls.prices[0].amount == Decimal("729.770020") assert cls.prices[0].amount == Decimal("243.2566680908203125")
assert mid.prices[0].amount == Decimal("730.839996") assert mid.prices[0].amount == Decimal("243.61333465576171875")
def test_fetch_type_mid_is_mean_of_low_and_high(src, spark_ok, recent_ok): def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_ok):
mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08")).prices mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08")).prices
hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08")).prices hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08")).prices
low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08")).prices low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08")).prices
@ -169,29 +157,29 @@ def test_fetch_type_mid_is_mean_of_low_and_high(src, spark_ok, recent_ok):
) )
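Here too the asserted mid is the mean of the fixture's high and low, carried at full binary-float precision:

    from decimal import Decimal

    high = Decimal("248.163330078125")
    low = Decimal("239.0633392333984375")
    assert (high + low) / 2 == Decimal("243.61333465576171875")  # TSLA mid for 2021-01-04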
def test_fetch_from_before_start(src, type, spark_ok, long_ok): def test_fetch_from_before_start(src, type, long_ok):
series = src.fetch(Series("IBM", "", type, "1900-01-01", "2021-01-08")) series = src.fetch(Series("IBM", "", type, "1900-01-01", "2021-01-08"))
assert series.prices[0] == Price("1962-01-02", Decimal("1.837710")) assert series.prices[0] == Price("1962-01-02", Decimal("1.5133211612701416015625"))
assert series.prices[-1] == Price("2021-01-08", Decimal("125.433624")) assert series.prices[-1] == Price("2021-01-08", Decimal("103.2923736572265625"))
assert len(series.prices) > 9 assert len(series.prices) > 9
def test_fetch_skips_dates_with_nulls(src, type, spark_ok, date_with_nulls_ok): def test_fetch_skips_dates_with_nulls(src, type, with_null_ok):
series = src.fetch(Series("IBM", "", type, "2021-01-05", "2021-01-07")) series = src.fetch(Series("INR=X", "", type, "2017-07-10", "2017-07-12"))
assert series.prices[0] == Price("2021-01-05", Decimal("123.101204")) assert series.prices[0] == Price("2017-07-10", Decimal("64.61170196533203125"))
assert series.prices[1] == Price("2021-01-07", Decimal("125.882545")) assert series.prices[1] == Price("2017-07-12", Decimal("64.52559661865234375"))
assert len(series.prices) == 2 assert len(series.prices) == 2
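The INR=X fixture added below carries a null row for 2017-07-11, and the test expects that day to be dropped silently. A minimal sketch of walking the chart arrays and skipping nulls, assuming the exchange-local date is recovered by adding meta.gmtoffset to each timestamp (the module's actual timezone handling may differ):

    from datetime import datetime, timezone
    from decimal import Decimal

    def rows(result, field="adjclose"):
        # result is chart["result"][0] of a v8 chart response like the fixtures below.
        indicators = result["indicators"]
        values = (
            indicators["adjclose"][0]["adjclose"]
            if field == "adjclose"
            else indicators["quote"][0][field]
        )
        offset = result["meta"]["gmtoffset"]
        for ts, value in zip(result["timestamp"], values):
            if value is None:
                continue  # e.g. 2017-07-11 in the INR=X fixture
            day = datetime.fromtimestamp(ts + offset, tz=timezone.utc).date().isoformat()
            yield day, Decimal(value)

Decimal applied directly to the parsed float is what would produce the long fractional tails in the assertions above, though that is an inference from the values rather than from the source.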
def test_fetch_to_future(src, type, spark_ok, recent_ok): def test_fetch_to_future(src, type, recent_ok):
series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2100-01-08")) series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2100-01-08"))
assert len(series.prices) > 0 assert len(series.prices) > 0
def test_fetch_no_data_in_past(src, type, spark_ok, requests_mock): def test_fetch_no_data_in_past(src, type, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
history_url("TSLA"), url("TSLA"),
status=400, status=400,
body=( body=(
"400 Bad Request: Data doesn't exist for " "400 Bad Request: Data doesn't exist for "
@ -203,10 +191,10 @@ def test_fetch_no_data_in_past(src, type, spark_ok, requests_mock):
assert "No data for the given interval" in str(e.value) assert "No data for the given interval" in str(e.value)
def test_fetch_no_data_in_future(src, type, spark_ok, requests_mock): def test_fetch_no_data_in_future(src, type, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
history_url("TSLA"), url("TSLA"),
status=400, status=400,
body=( body=(
"400 Bad Request: Data doesn't exist for " "400 Bad Request: Data doesn't exist for "
@ -218,10 +206,10 @@ def test_fetch_no_data_in_future(src, type, spark_ok, requests_mock):
assert "No data for the given interval" in str(e.value) assert "No data for the given interval" in str(e.value)
def test_fetch_no_data_on_weekend(src, type, spark_ok, requests_mock): def test_fetch_no_data_on_weekend(src, type, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
history_url("TSLA"), url("TSLA"),
status=404, status=404,
body="404 Not Found: Timestamp data missing.", body="404 Not Found: Timestamp data missing.",
) )
@ -233,30 +221,7 @@ def test_fetch_no_data_on_weekend(src, type, spark_ok, requests_mock):
def test_fetch_bad_sym(src, type, requests_mock): def test_fetch_bad_sym(src, type, requests_mock):
requests_mock.add( requests_mock.add(
responses.GET, responses.GET,
spark_url, url("NOTABASE"),
status=404,
body="""{
"spark": {
"result": null,
"error": {
"code": "Not Found",
"description": "No data found for spark symbols"
}
}
}""",
)
with pytest.raises(exceptions.InvalidPair) as e:
src.fetch(Series("NOTABASE", "", type, "2021-01-04", "2021-01-08"))
assert "Symbol not found" in str(e.value)
def test_fetch_bad_sym_history(src, type, spark_ok, requests_mock):
# In practice the spark history requests should succeed or fail together.
# This extra test ensures that a failure of the history part is handled
# correctly even if the spark part succeeds.
requests_mock.add(
responses.GET,
history_url("NOTABASE"),
status=404, status=404,
body="404 Not Found: No data found, symbol may be delisted", body="404 Not Found: No data found, symbol may be delisted",
) )
@ -271,61 +236,23 @@ def test_fetch_giving_quote(src, type):
assert "quote currency" in str(e.value) assert "quote currency" in str(e.value)
def test_fetch_spark_network_issue(src, type, requests_mock): def test_fetch_network_issue(src, type, requests_mock):
body = requests.exceptions.ConnectionError("Network issue") body = requests.exceptions.ConnectionError("Network issue")
requests_mock.add(responses.GET, spark_url, body=body) requests_mock.add(responses.GET, url("TSLA"), body=body)
with pytest.raises(exceptions.RequestError) as e: with pytest.raises(exceptions.RequestError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "Network issue" in str(e.value) assert "Network issue" in str(e.value)
def test_fetch_spark_bad_status(src, type, requests_mock): def test_fetch_bad_status(src, type, requests_mock):
requests_mock.add(responses.GET, spark_url, status=500, body="Some other reason") requests_mock.add(responses.GET, url("TSLA"), status=500, body="Some other reason")
with pytest.raises(exceptions.BadResponse) as e: with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "Internal Server Error" in str(e.value) assert "Internal Server Error" in str(e.value)
def test_fetch_spark_parsing_error(src, type, requests_mock): def test_fetch_parsing_error(src, type, requests_mock):
requests_mock.add(responses.GET, spark_url, body="NOT JSON") requests_mock.add(responses.GET, url("TSLA"), body="")
with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "spark data couldn't be parsed" in str(e.value)
def test_fetch_spark_unexpected_json(src, type, requests_mock):
requests_mock.add(responses.GET, spark_url, body='{"notdata": []}')
with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "spark data couldn't be parsed" in str(e.value)
def test_fetch_history_network_issue(src, type, spark_ok, requests_mock):
body = requests.exceptions.ConnectionError("Network issue")
requests_mock.add(responses.GET, history_url("TSLA"), body=body)
with pytest.raises(exceptions.RequestError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "Network issue" in str(e.value)
def test_fetch_history_bad_status(src, type, spark_ok, requests_mock):
requests_mock.add(
responses.GET, history_url("TSLA"), status=500, body="Some other reason"
)
with pytest.raises(exceptions.BadResponse) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "Internal Server Error" in str(e.value)
def test_fetch_history_parsing_error(src, type, spark_ok, requests_mock):
requests_mock.add(responses.GET, history_url("TSLA"), body="")
with pytest.raises(exceptions.ResponseParsingError) as e: with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08")) src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "error occurred while parsing data from the source" in str(e.value) assert "error occurred while parsing data from the source" in str(e.value)
def test_fetch_history_unexpected_csv_format(src, type, spark_ok, requests_mock):
requests_mock.add(responses.GET, history_url("TSLA"), body="BAD HEADER\nBAD DATA")
with pytest.raises(exceptions.ResponseParsingError) as e:
src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
assert "Unexpected CSV format" in str(e.value)


@ -1,4 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
2021-01-05,125.010002,126.680000,124.610001,126.139999,123.101204,6114600
2021-01-06,null,null,null,null,null,null
2021-01-07,130.039993,130.460007,128.259995,128.990005,125.882545,4507400


@ -1,11 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
1962-01-02,7.713333,7.713333,7.626667,7.626667,1.837710,390000
1962-01-03,7.626667,7.693333,7.626667,7.693333,1.853774,292500
1962-01-04,7.693333,7.693333,7.613333,7.616667,1.835299,262500
1962-01-05,7.606667,7.606667,7.453333,7.466667,1.799155,367500
1962-01-08,7.460000,7.460000,7.266667,7.326667,1.765422,547500
2021-01-04,125.849998,125.919998,123.040001,123.940002,120.954201,5179200
2021-01-05,125.010002,126.680000,124.610001,126.139999,123.101204,6114600
2021-01-06,126.900002,131.880005,126.720001,129.289993,126.175316,7956700
2021-01-07,130.039993,130.460007,128.259995,128.990005,125.882545,4507400
2021-01-08,128.570007,129.320007,126.980003,128.529999,125.433624,4676200


@ -0,0 +1,249 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "USD",
"symbol": "IBM",
"exchangeName": "NYQ",
"fullExchangeName": "NYSE",
"instrumentType": "EQUITY",
"firstTradeDate": -252322200,
"regularMarketTime": 1726257602,
"hasPrePostMarketData": true,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 214.79,
"fiftyTwoWeekHigh": 216.08,
"fiftyTwoWeekLow": 212.13,
"regularMarketDayHigh": 216.08,
"regularMarketDayLow": 212.13,
"regularMarketVolume": 4553547,
"longName": "International Business Machines Corporation",
"shortName": "International Business Machines",
"chartPreviousClose": 7.291,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"end": 1726234200,
"start": 1726214400,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"end": 1726257600,
"start": 1726234200,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"end": 1726272000,
"start": 1726257600,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
-252322200,
-252235800,
-252149400,
-252063000,
-251803800,
1609770600,
1609857000,
1609943400,
1610029800,
1610116200
],
"events": {
"dividends": {
"-249298200": {
"amount": 0.000956,
"date": -249298200
},
"-241439400": {
"amount": 0.000956,
"date": -241439400
},
"-233577000": {
"amount": 0.000956,
"date": -233577000
},
"-225797400": {
"amount": 0.000956,
"date": -225797400
},
"-217848600": {
"amount": 0.001275,
"date": -217848600
},
"1573137000": {
"amount": 1.548757,
"date": 1573137000
},
"1581085800": {
"amount": 1.548757,
"date": 1581085800
},
"1588858200": {
"amount": 1.558317,
"date": 1588858200
},
"1596807000": {
"amount": 1.558317,
"date": 1596807000
},
"1604932200": {
"amount": 1.558317,
"date": 1604932200
}
},
"splits": {
"-177417000": {
"date": -177417000,
"numerator": 5.0,
"denominator": 4.0,
"splitRatio": "5:4"
},
"-114345000": {
"date": -114345000,
"numerator": 3.0,
"denominator": 2.0,
"splitRatio": "3:2"
},
"-53343000": {
"date": -53343000,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
},
"107530200": {
"date": 107530200,
"numerator": 5.0,
"denominator": 4.0,
"splitRatio": "5:4"
},
"297091800": {
"date": 297091800,
"numerator": 4.0,
"denominator": 1.0,
"splitRatio": "4:1"
},
"864826200": {
"date": 864826200,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
},
"927811800": {
"date": 927811800,
"numerator": 2.0,
"denominator": 1.0,
"splitRatio": "2:1"
}
}
},
"indicators": {
"quote": [
{
"close": [
7.2912678718566895,
7.3550028800964355,
7.281707763671875,
7.138305187225342,
7.00446081161499,
118.48948669433594,
120.59273529052734,
123.60420989990234,
123.31739807128906,
122.87763214111328
],
"low": [
7.2912678718566895,
7.2912678718566895,
7.2785210609436035,
7.125557899475098,
6.9471001625061035,
117.62906646728516,
119.13002014160156,
121.14722442626953,
122.61949920654297,
121.39579010009766
],
"open": [
7.374124050140381,
7.2912678718566895,
7.3550028800964355,
7.272148132324219,
7.131930828094482,
120.31549072265625,
119.5124282836914,
121.3193130493164,
124.32122039794922,
122.9158706665039
],
"high": [
7.374124050140381,
7.3550028800964355,
7.3550028800964355,
7.272148132324219,
7.131930828094482,
120.38240814208984,
121.1089859008789,
126.08030700683594,
124.7227554321289,
123.63288879394531
],
"volume": [
407940,
305955,
274575,
384405,
572685,
5417443,
6395872,
8322708,
4714740,
4891305
]
}
],
"adjclose": [
{
"adjclose": [
1.5133211612701416,
1.5265485048294067,
1.5113375186920166,
1.4815733432769775,
1.4537923336029053,
99.60364532470703,
101.37164306640625,
103.90313720703125,
103.66202545166016,
103.29237365722656
]
}
]
}
}
],
"error": null
}
}


@ -0,0 +1,119 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "INR",
"symbol": "INR=X",
"exchangeName": "CCY",
"fullExchangeName": "CCY",
"instrumentType": "CURRENCY",
"firstTradeDate": 1070236800,
"regularMarketTime": 1726284616,
"hasPrePostMarketData": false,
"gmtoffset": 3600,
"timezone": "BST",
"exchangeTimezoneName": "Europe/London",
"regularMarketPrice": 83.89,
"fiftyTwoWeekHigh": 83.89,
"fiftyTwoWeekLow": 83.89,
"regularMarketDayHigh": 83.89,
"regularMarketDayLow": 83.89,
"regularMarketVolume": 0,
"longName": "USD/INR",
"shortName": "USD/INR",
"chartPreviousClose": 64.6117,
"priceHint": 4,
"currentTradingPeriod": {
"pre": {
"timezone": "BST",
"start": 1726182000,
"end": 1726182000,
"gmtoffset": 3600
},
"regular": {
"timezone": "BST",
"start": 1726182000,
"end": 1726268340,
"gmtoffset": 3600
},
"post": {
"timezone": "BST",
"start": 1726268340,
"end": 1726268340,
"gmtoffset": 3600
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1499641200,
1499727600,
1499814000,
1499900400
],
"indicators": {
"quote": [
{
"open": [
64.6155014038086,
null,
64.55549621582031,
64.46800231933594
],
"volume": [
0,
null,
0,
0
],
"low": [
64.41000366210938,
null,
64.3499984741211,
64.33999633789062
],
"close": [
64.61170196533203,
null,
64.52559661865234,
64.36499786376953
],
"high": [
64.6155014038086,
null,
64.56999969482422,
64.48419952392578
]
}
],
"adjclose": [
{
"adjclose": [
64.61170196533203,
null,
64.52559661865234,
64.36499786376953
]
}
]
}
}
],
"error": null
}
}


@ -1,6 +0,0 @@
Date,Open,High,Low,Close,Adj Close,Volume
2021-01-04,719.460022,744.489990,717.190002,729.770020,729.770020,48638200
2021-01-05,723.659973,740.840027,719.200012,735.109985,735.109985,32245200
2021-01-06,758.489990,774.000000,749.099976,755.979980,755.979980,44700000
2021-01-07,777.630005,816.989990,775.200012,816.039978,816.039978,51498900
2021-01-08,856.000000,884.489990,838.390015,880.020020,880.020020,75055500


@ -0,0 +1,126 @@
{
"chart": {
"result": [
{
"meta": {
"currency": "USD",
"symbol": "TSLA",
"exchangeName": "NMS",
"fullExchangeName": "NasdaqGS",
"instrumentType": "EQUITY",
"firstTradeDate": 1277818200,
"regularMarketTime": 1726257600,
"hasPrePostMarketData": true,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 230.29,
"fiftyTwoWeekHigh": 232.664,
"fiftyTwoWeekLow": 226.32,
"regularMarketDayHigh": 232.664,
"regularMarketDayLow": 226.32,
"regularMarketVolume": 59096538,
"longName": "Tesla, Inc.",
"shortName": "Tesla, Inc.",
"chartPreviousClose": 235.223,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"start": 1726214400,
"end": 1726234200,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"start": 1726234200,
"end": 1726257600,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"start": 1726257600,
"end": 1726272000,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1609770600,
1609857000,
1609943400,
1610029800,
1610116200
],
"indicators": {
"quote": [
{
"open": [
239.82000732421875,
241.22000122070312,
252.8300018310547,
259.2099914550781,
285.3333435058594
],
"close": [
243.2566680908203,
245.0366668701172,
251.9933319091797,
272.0133361816406,
293.3399963378906
],
"high": [
248.163330078125,
246.94667053222656,
258.0,
272.3299865722656,
294.8299865722656
],
"low": [
239.06333923339844,
239.73333740234375,
249.6999969482422,
258.3999938964844,
279.46331787109375
],
"volume": [
145914600,
96735600,
134100000,
154496700,
225166500
]
}
],
"adjclose": [
{
"adjclose": [
243.2566680908203,
245.0366668701172,
251.9933319091797,
272.0133361816406,
293.3399963378906
]
}
]
}
}
],
"error": null
}
}


@ -1,77 +0,0 @@
{
"spark": {
"result": [
{
"symbol": "TSLA",
"response": [
{
"meta": {
"currency": "USD",
"symbol": "TSLA",
"exchangeName": "NMS",
"instrumentType": "EQUITY",
"firstTradeDate": 1277818200,
"regularMarketTime": 1626465603,
"gmtoffset": -14400,
"timezone": "EDT",
"exchangeTimezoneName": "America/New_York",
"regularMarketPrice": 644.22,
"chartPreviousClose": 650.6,
"priceHint": 2,
"currentTradingPeriod": {
"pre": {
"timezone": "EDT",
"start": 1626422400,
"end": 1626442200,
"gmtoffset": -14400
},
"regular": {
"timezone": "EDT",
"start": 1626442200,
"end": 1626465600,
"gmtoffset": -14400
},
"post": {
"timezone": "EDT",
"start": 1626465600,
"end": 1626480000,
"gmtoffset": -14400
}
},
"dataGranularity": "1d",
"range": "1d",
"validRanges": [
"1d",
"5d",
"1mo",
"3mo",
"6mo",
"1y",
"2y",
"5y",
"10y",
"ytd",
"max"
]
},
"timestamp": [
1626442200,
1626465603
],
"indicators": {
"quote": [
{
"close": [
644.22,
644.22
]
}
]
}
}
]
}
],
"error": null
}
}