Improve alphavantage error handling and add tests.
This commit is contained in:
parent
99761ebbd1
commit
d3931310a2
9 changed files with 1185 additions and 44 deletions
|
@ -28,10 +28,32 @@ class InvalidType(SourceError, ValueError):
|
|||
self.pair = "/".join([s for s in [base, quote] if s])
|
||||
message = (
|
||||
f"Invalid price type '{type}' for pair '{self.pair}'. "
|
||||
f"Run 'pricehist source {source.id()} "
|
||||
f"Run 'pricehist source {source.id()}' "
|
||||
f"for information about valid types."
|
||||
)
|
||||
super(InvalidPair, self).__init__(message)
|
||||
super(InvalidType, self).__init__(message)
|
||||
|
||||
|
||||
class CredentialsError(SourceError):
|
||||
"""Access credentials are unavailable or invalid."""
|
||||
|
||||
def __init__(self, keys, source):
|
||||
self.keys = keys
|
||||
self.source = source
|
||||
message = (
|
||||
f"Access credentials for source '{source.id()}' are unavailable "
|
||||
f"""or invalid. Set the environment variables '{"', '".join(keys)}' """
|
||||
f"correctly. Run 'pricehist source {source.id()}' for more "
|
||||
f"information about credentials."
|
||||
)
|
||||
super(CredentialsError, self).__init__(message)
|
||||
|
||||
|
||||
class RateLimit(SourceError):
|
||||
"""Source request rate limit reached."""
|
||||
|
||||
def __init__(self, message):
|
||||
super(RateLimit, self).__init__(f"{self.__doc__} {message}")
|
||||
|
||||
|
||||
class RequestError(SourceError):
|
||||
|
|
|
@ -8,6 +8,7 @@ from decimal import Decimal
|
|||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
@ -85,20 +86,38 @@ class AlphaVantage(BaseSource):
|
|||
output_quote = series.quote
|
||||
|
||||
if series.quote == "":
|
||||
output_quote = self._stock_currency(output_base)
|
||||
data = self._stock_data(series)
|
||||
output_quote, data = self._stock_data(series)
|
||||
else:
|
||||
if series.type == "adjclose":
|
||||
logging.critical(
|
||||
"The 'adjclose' price type is only available for stocks. "
|
||||
"Use 'close' instead."
|
||||
raise exceptions.InvalidType(
|
||||
series.type, series.base, series.quote, self
|
||||
)
|
||||
exit(1)
|
||||
elif series.base in [s for s, n in self._physical_symbols()]:
|
||||
|
||||
physical_symbols = [s for s, n in self._physical_symbols()]
|
||||
|
||||
if series.quote not in physical_symbols:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base,
|
||||
series.quote,
|
||||
self,
|
||||
"When given, the quote must be a physical currency.",
|
||||
)
|
||||
|
||||
if series.base in physical_symbols:
|
||||
data = self._physical_data(series)
|
||||
else:
|
||||
|
||||
elif series.base in [s for s, n in self._digital_symbols()]:
|
||||
data = self._digital_data(series)
|
||||
|
||||
else:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base,
|
||||
series.quote,
|
||||
self,
|
||||
"When a quote currency is given, the base must be a known "
|
||||
"physical or digital currency.",
|
||||
)
|
||||
|
||||
prices = [
|
||||
Price(day, amount)
|
||||
for day, entries in data.items()
|
||||
|
@ -112,7 +131,7 @@ class AlphaVantage(BaseSource):
|
|||
def _amount(self, day, entries, series):
|
||||
if day < series.start or day > series.end:
|
||||
return None
|
||||
elif type == "mid":
|
||||
elif series.type == "mid":
|
||||
return sum([Decimal(entries["high"]), Decimal(entries["low"])]) / 2
|
||||
else:
|
||||
return Decimal(entries[series.type])
|
||||
|
@ -122,7 +141,7 @@ class AlphaVantage(BaseSource):
|
|||
for match in data["bestMatches"]:
|
||||
if match["1. symbol"] == symbol:
|
||||
return match["8. currency"]
|
||||
return "Unknown"
|
||||
return None
|
||||
|
||||
def _search_data(self, keywords: str):
|
||||
params = {
|
||||
|
@ -130,19 +149,73 @@ class AlphaVantage(BaseSource):
|
|||
"keywords": keywords,
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if type(data) == dict and "Note" in data and "call frequency" in data["Note"]:
|
||||
raise exceptions.RateLimit(data["Note"])
|
||||
|
||||
expected_keys = ["1. symbol", "2. name", "3. type", "4. region", "8. currency"]
|
||||
if (
|
||||
type(data) != dict
|
||||
or "bestMatches" not in data
|
||||
or type(data["bestMatches"]) != list
|
||||
or not all(k in m for k in expected_keys for m in data["bestMatches"])
|
||||
):
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
return data
|
||||
|
||||
def _stock_data(self, series):
|
||||
output_quote = self._stock_currency(series.base) or "UNKNOWN"
|
||||
|
||||
params = {
|
||||
"function": "TIME_SERIES_DAILY_ADJUSTED",
|
||||
"symbol": series.base,
|
||||
"outputsize": self._outputsize(series.start),
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if type(data) == dict and "Note" in data and "call frequency" in data["Note"]:
|
||||
raise exceptions.RateLimit(data["Note"])
|
||||
|
||||
if "Error Message" in data:
|
||||
if output_quote == "UNKNOWN":
|
||||
raise exceptions.InvalidPair(
|
||||
series.base, series.quote, self, "Unknown stock symbol."
|
||||
)
|
||||
else:
|
||||
raise exceptions.BadResponse(data["Error Message"])
|
||||
|
||||
try:
|
||||
normalized_data = {
|
||||
day: {
|
||||
"open": entries["1. open"],
|
||||
|
@ -153,7 +226,10 @@ class AlphaVantage(BaseSource):
|
|||
}
|
||||
for day, entries in reversed(data["Time Series (Daily)"].items())
|
||||
}
|
||||
return normalized_data
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.") from e
|
||||
|
||||
return output_quote, normalized_data
|
||||
|
||||
def _physical_data(self, series):
|
||||
params = {
|
||||
|
@ -163,8 +239,28 @@ class AlphaVantage(BaseSource):
|
|||
"outputsize": self._outputsize(series.start),
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if type(data) == dict and "Note" in data and "call frequency" in data["Note"]:
|
||||
raise exceptions.RateLimit(data["Note"])
|
||||
|
||||
if type(data) != dict or "Time Series FX (Daily)" not in data:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
normalized_data = {
|
||||
day: {k[3:]: v for k, v in entries.items()}
|
||||
for day, entries in reversed(data["Time Series FX (Daily)"].items())
|
||||
|
@ -185,8 +281,28 @@ class AlphaVantage(BaseSource):
|
|||
"market": series.quote,
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if type(data) == dict and "Note" in data and "call frequency" in data["Note"]:
|
||||
raise exceptions.RateLimit(data["Note"])
|
||||
|
||||
if type(data) != dict or "Time Series (Digital Currency Daily)" not in data:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
normalized_data = {
|
||||
day: {
|
||||
"open": entries[f"1a. open ({series.quote})"],
|
||||
|
@ -204,24 +320,36 @@ class AlphaVantage(BaseSource):
|
|||
key_name = "ALPHAVANTAGE_API_KEY"
|
||||
key = os.getenv(key_name)
|
||||
if require and not key:
|
||||
logging.critical(
|
||||
f"The environment variable {key_name} is empty. "
|
||||
"Get a free API key from https://www.alphavantage.co/support/#api-key, "
|
||||
f'export {key_name}="YOUR_OWN_API_KEY" and retry.'
|
||||
)
|
||||
exit(1)
|
||||
raise exceptions.CredentialsError([key_name], self)
|
||||
return key
|
||||
|
||||
def _physical_symbols(self) -> list[(str, str)]:
|
||||
url = "https://www.alphavantage.co/physical_currency_list/"
|
||||
response = self.log_curl(requests.get(url))
|
||||
lines = response.content.decode("utf-8").splitlines()
|
||||
data = csv.reader(lines[1:], delimiter=",")
|
||||
return [(s, f"Physical: {n}") for s, n in data]
|
||||
return self._get_symbols(url, "Physical: ")
|
||||
|
||||
def _digital_symbols(self) -> list[(str, str)]:
|
||||
url = "https://www.alphavantage.co/digital_currency_list/"
|
||||
return self._get_symbols(url, "Digital: ")
|
||||
|
||||
def _get_symbols(self, url, prefix) -> list[(str, str)]:
|
||||
try:
|
||||
response = self.log_curl(requests.get(url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
lines = response.content.decode("utf-8").splitlines()
|
||||
data = csv.reader(lines[1:], delimiter=",")
|
||||
return [(s, f"Digital: {n}") for s, n in data]
|
||||
results = [(s, f"{prefix}{n}") for s, n in data]
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if len(results) == 0:
|
||||
raise exceptions.ResponseParsingError("Symbols data missing.")
|
||||
|
||||
return results
|
||||
|
|
632
tests/pricehist/sources/test_alphavantage.py
Normal file
632
tests/pricehist/sources/test_alphavantage.py
Normal file
|
@ -0,0 +1,632 @@
|
|||
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.alphavantage import AlphaVantage
|
||||
|
||||
api_key_name = "ALPHAVANTAGE_API_KEY"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def mock_settings_env_vars(monkeypatch):
|
||||
value = "NOTAREALKEY12345"
|
||||
if not os.getenv(api_key_name):
|
||||
monkeypatch.setenv(api_key_name, value, prepend=False)
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return AlphaVantage()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
physical_list_url = "https://www.alphavantage.co/physical_currency_list/"
|
||||
digital_list_url = "https://www.alphavantage.co/digital_currency_list/"
|
||||
|
||||
search_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=SYMBOL_SEARCH.*"
|
||||
)
|
||||
stock_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=TIME_SERIES_DAILY_ADJUSTED.*"
|
||||
)
|
||||
physical_url = re.compile(r"https://www\.alphavantage\.co/query\?function=FX_DAILY.*")
|
||||
digital_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=DIGITAL_CURRENCY_DAILY.*"
|
||||
)
|
||||
|
||||
rate_limit_json = (
|
||||
'{ "Note": "'
|
||||
"Thank you for using Alpha Vantage! Our standard API call frequency is 5 "
|
||||
"calls per minute and 500 calls per day. Please visit "
|
||||
"https://www.alphavantage.co/premium/ if you would like to target a higher "
|
||||
"API call frequency."
|
||||
'" }'
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def physical_list_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "physical-partial.csv").read_text()
|
||||
requests_mock.add(responses.GET, physical_list_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def digital_list_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "digital-partial.csv").read_text()
|
||||
requests_mock.add(responses.GET, digital_list_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def search_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "search-ibm.json").read_text()
|
||||
requests_mock.add(responses.GET, search_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def search_not_found(requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body='{"bestMatches":[]}', status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def ibm_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "ibm-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, stock_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def euraud_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "eur-aud-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, physical_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def btcaud_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "btc-aud-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, digital_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("tsla") == "TSLA"
|
||||
assert src.normalizesymbol("btc") == "BTC"
|
||||
assert src.normalizesymbol("eur") == "EUR"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols_stock_message(src, physical_list_ok, digital_list_ok, caplog):
|
||||
with caplog.at_level(logging.INFO):
|
||||
src.symbols()
|
||||
assert any(["Stock symbols can be discovered" in r.message for r in caplog.records])
|
||||
|
||||
|
||||
def test_symbols(src, physical_list_ok, digital_list_ok):
|
||||
syms = src.symbols()
|
||||
assert ("BTC", "Digital: Bitcoin") in syms
|
||||
assert ("AUD", "Physical: Australian Dollar") in syms
|
||||
assert len(syms) > 2
|
||||
|
||||
|
||||
def test_symbols_digital_network_issue(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
digital_list_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_bad_status(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_no_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, body="NOT CSV", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Symbols data missing." in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_bad_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, body="A,B,C\na,b,c", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "too many values" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_network_issue(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
physical_list_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_bad_status(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_list_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_no_data(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_list_url, body="", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Symbols data missing." in str(e.value)
|
||||
|
||||
|
||||
def test_search(src, search_ok):
|
||||
results = src.search("IBM")
|
||||
req = search_ok.calls[0].request
|
||||
assert req.params["function"] == "SYMBOL_SEARCH"
|
||||
assert req.params["keywords"] == "IBM"
|
||||
assert len(req.params["apikey"]) > 0
|
||||
assert len(results) == 10
|
||||
for expected in [
|
||||
("IBM", "International Business Machines Corp, Equity, United States, USD"),
|
||||
("IBMJ", "iShares iBonds Dec 2021 Term Muni Bond ETF, ETF, United States, USD"),
|
||||
("IBMK", "iShares iBonds Dec 2022 Term Muni Bond ETF, ETF, United States, USD"),
|
||||
("IBM.DEX", "International Business Machines Corporation, Equity, XETRA, EUR"),
|
||||
]:
|
||||
assert expected in results
|
||||
|
||||
|
||||
def test_search_network_issue(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
search_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.search("IBM")
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_status(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.search("IBM")
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body="NOT JSON", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_json(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body="{}", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "Unexpected content." in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_json_tricky(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET, search_url, body='{"bestMatches": [{}]}', status=200
|
||||
)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "Unexpected content." in str(e.value)
|
||||
|
||||
|
||||
def test_search_rate_limit(src, type, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_known(src, type, search_ok, ibm_ok):
|
||||
series = src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
search_req = search_ok.calls[0].request
|
||||
stock_req = ibm_ok.calls[1].request
|
||||
assert search_req.params["function"] == "SYMBOL_SEARCH"
|
||||
assert search_req.params["keywords"] == "IBM"
|
||||
assert stock_req.params["function"] == "TIME_SERIES_DAILY_ADJUSTED"
|
||||
assert stock_req.params["symbol"] == "IBM"
|
||||
assert stock_req.params["outputsize"] == "full"
|
||||
assert (series.base, series.quote) == ("IBM", "USD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("123.94"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("128.53"))
|
||||
|
||||
|
||||
def test_fetch_stock_compact_if_recent(src, type, search_ok, ibm_ok):
|
||||
today = datetime.now().date()
|
||||
start = (today - timedelta(days=30)).isoformat()
|
||||
end = today.isoformat()
|
||||
src.fetch(Series("IBM", "", type, start, end))
|
||||
stock_req = ibm_ok.calls[1].request
|
||||
assert stock_req.params["outputsize"] == "compact"
|
||||
|
||||
|
||||
def test_fetch_stock_requests_logged(src, type, search_ok, ibm_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and " curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 2
|
||||
|
||||
|
||||
def test_fetch_stock_types_all_available(src, search_ok, ibm_ok):
|
||||
cls = src.fetch(Series("IBM", "", "close", "2021-01-04", "2021-01-08"))
|
||||
opn = src.fetch(Series("IBM", "", "open", "2021-01-04", "2021-01-08"))
|
||||
hgh = src.fetch(Series("IBM", "", "high", "2021-01-04", "2021-01-08"))
|
||||
low = src.fetch(Series("IBM", "", "low", "2021-01-04", "2021-01-08"))
|
||||
adj = src.fetch(Series("IBM", "", "adjclose", "2021-01-04", "2021-01-08"))
|
||||
mid = src.fetch(Series("IBM", "", "mid", "2021-01-04", "2021-01-08"))
|
||||
assert cls.prices[0].amount == Decimal("123.94")
|
||||
assert opn.prices[0].amount == Decimal("125.85")
|
||||
assert hgh.prices[0].amount == Decimal("125.9174")
|
||||
assert low.prices[0].amount == Decimal("123.04")
|
||||
assert adj.prices[0].amount == Decimal("120.943645029")
|
||||
assert mid.prices[0].amount == Decimal("124.4787")
|
||||
|
||||
|
||||
def test_fetch_stock_type_mid_is_mean_of_low_and_high(src, search_ok, ibm_ok):
|
||||
hgh = src.fetch(Series("IBM", "", "high", "2021-01-04", "2021-01-08")).prices
|
||||
low = src.fetch(Series("IBM", "", "low", "2021-01-04", "2021-01-08")).prices
|
||||
mid = src.fetch(Series("IBM", "", "mid", "2021-01-04", "2021-01-08")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_stock_bad_sym(src, type, search_not_found, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
stock_url,
|
||||
status=200,
|
||||
body="""{
|
||||
"Error Message": "Invalid API call. Please retry or..."
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTASTOCK", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unknown stock symbol" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_quote_found_prices_error(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
stock_url,
|
||||
status=200,
|
||||
body="""{
|
||||
"Error Message": "Invalid API call. Please retry or..."
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "bad response" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_network_issue(src, type, search_ok, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, stock_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_bad_status(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_parsing_error(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_unexpected_json(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_rate_limit(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_known(src, type, physical_list_ok, euraud_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
req = euraud_ok.calls[1].request
|
||||
assert req.params["function"] == "FX_DAILY"
|
||||
assert req.params["from_symbol"] == "EUR"
|
||||
assert req.params["to_symbol"] == "AUD"
|
||||
assert req.params["outputsize"] == "full"
|
||||
assert (series.base, series.quote) == ("EUR", "AUD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("1.59718"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("1.57350"))
|
||||
|
||||
|
||||
def test_fetch_physical_compact_if_recent(src, type, physical_list_ok, euraud_ok):
|
||||
today = datetime.now().date()
|
||||
start = (today - timedelta(days=30)).isoformat()
|
||||
end = today.isoformat()
|
||||
src.fetch(Series("EUR", "AUD", type, start, end))
|
||||
req = euraud_ok.calls[1].request
|
||||
assert req.params["outputsize"] == "compact"
|
||||
|
||||
|
||||
def test_fetch_physical_requests_logged(src, type, physical_list_ok, euraud_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and " curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 2
|
||||
|
||||
|
||||
def test_fetch_physical_types_but_adjclose_available(src, physical_list_ok, euraud_ok):
|
||||
cls = src.fetch(Series("EUR", "AUD", "close", "2021-01-04", "2021-01-08"))
|
||||
opn = src.fetch(Series("EUR", "AUD", "open", "2021-01-04", "2021-01-08"))
|
||||
hgh = src.fetch(Series("EUR", "AUD", "high", "2021-01-04", "2021-01-08"))
|
||||
low = src.fetch(Series("EUR", "AUD", "low", "2021-01-04", "2021-01-08"))
|
||||
mid = src.fetch(Series("EUR", "AUD", "mid", "2021-01-04", "2021-01-08"))
|
||||
assert cls.prices[0].amount == Decimal("1.59718")
|
||||
assert opn.prices[0].amount == Decimal("1.58741")
|
||||
assert hgh.prices[0].amount == Decimal("1.60296")
|
||||
assert low.prices[0].amount == Decimal("1.58550")
|
||||
assert mid.prices[0].amount == Decimal("1.59423")
|
||||
|
||||
|
||||
def test_fetch_physical_adjclose_not_available(src):
|
||||
with pytest.raises(exceptions.InvalidType) as e:
|
||||
src.fetch(Series("EUR", "AUD", "adjclose", "2021-01-04", "2021-01-08"))
|
||||
assert "Invalid price type 'adjclose' for pair 'EUR/AUD'." in str(e)
|
||||
|
||||
|
||||
def test_fetch_physical_type_mid_is_mean_of_low_and_high(
|
||||
src, physical_list_ok, euraud_ok
|
||||
):
|
||||
hgh = src.fetch(Series("EUR", "AUD", "high", "2021-01-04", "2021-01-08")).prices
|
||||
low = src.fetch(Series("EUR", "AUD", "low", "2021-01-04", "2021-01-08")).prices
|
||||
mid = src.fetch(Series("EUR", "AUD", "mid", "2021-01-04", "2021-01-08")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_physical_bad_sym(src, type, physical_list_ok, digital_list_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTPHYSICAL", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "base must be a known physical or digital currency" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_network_issue(src, type, physical_list_ok, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, physical_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_bad_status(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_parsing_error(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_unexpected_json(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_rate_limit(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_known(src, type, physical_list_ok, digital_list_ok, btcaud_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
req = btcaud_ok.calls[2].request
|
||||
assert req.params["function"] == "DIGITAL_CURRENCY_DAILY"
|
||||
assert req.params["symbol"] == "BTC"
|
||||
assert req.params["market"] == "AUD"
|
||||
assert (series.base, series.quote) == ("BTC", "AUD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("43406.76014740"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("55068.43820140"))
|
||||
|
||||
|
||||
def test_fetch_digital_requests_logged(
    src, type, physical_list_ok, digital_list_ok, btcaud_ok, caplog
):
    """Each of the three HTTP requests is logged as a curl command at DEBUG."""
    with caplog.at_level(logging.DEBUG):
        src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
    curl_records = [
        record
        for record in caplog.records
        if record.levelname == "DEBUG" and " curl " in record.message
    ]
    assert len(curl_records) == 3
|
||||
|
||||
|
||||
def test_fetch_digital_types_but_adjclose_available(
    src, physical_list_ok, digital_list_ok, btcaud_ok
):
    """Every supported price type returns the expected first amount for BTC/AUD."""

    def first_amount(price_type):
        # Fetch the series for the given type and return its earliest amount.
        fetched = src.fetch(Series("BTC", "AUD", price_type, "2021-01-04", "2021-01-08"))
        return fetched.prices[0].amount

    assert first_amount("close") == Decimal("43406.76014740")
    assert first_amount("open") == Decimal("44779.08784700")
    assert first_amount("high") == Decimal("45593.18400000")
    assert first_amount("low") == Decimal("38170.72220000")
    assert first_amount("mid") == Decimal("41881.95310000")
|
||||
|
||||
|
||||
def test_fetch_digital_adjclose_not_available(src):
    """Requesting 'adjclose' for a digital pair raises InvalidType."""
    series = Series("BTC", "AUD", "adjclose", "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.InvalidType) as excinfo:
        src.fetch(series)
    assert "Invalid price type 'adjclose' for pair 'BTC/AUD'." in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_type_mid_is_mean_of_low_and_high(
    src, physical_list_ok, digital_list_ok, btcaud_ok
):
    """The 'mid' price equals the arithmetic mean of 'low' and 'high'."""
    highs = src.fetch(Series("BTC", "AUD", "high", "2021-01-04", "2021-01-08")).prices
    lows = src.fetch(Series("BTC", "AUD", "low", "2021-01-04", "2021-01-08")).prices
    mids = src.fetch(Series("BTC", "AUD", "mid", "2021-01-04", "2021-01-08")).prices
    for i in range(5):
        assert mids[i].amount == (lows[i].amount + highs[i].amount) / 2
|
||||
|
||||
|
||||
def test_fetch_digital_bad_sym(src, type, physical_list_ok, digital_list_ok):
    """An unknown base symbol raises InvalidPair."""
    series = Series("NOTDIGITAL", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.InvalidPair) as excinfo:
        src.fetch(series)
    assert "base must be a known physical or digital currency" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_network_issue(
    src, type, physical_list_ok, digital_list_ok, requests_mock
):
    """A connection-level failure surfaces as RequestError."""
    connection_error = requests.exceptions.ConnectionError("Network issue")
    requests_mock.add(responses.GET, digital_url, body=connection_error)
    with pytest.raises(exceptions.RequestError) as excinfo:
        src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
    assert "Network issue" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_bad_status(
    src, type, physical_list_ok, digital_list_ok, requests_mock
):
    """A 5xx response for digital data surfaces as BadResponse."""
    requests_mock.add(responses.GET, digital_url, status=500, body="Some other reason")
    series = Series("BTC", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.BadResponse) as excinfo:
        src.fetch(series)
    assert "Internal Server Error" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_parsing_error(
    src, type, physical_list_ok, digital_list_ok, requests_mock
):
    """A non-JSON digital-currency response raises ResponseParsingError."""
    requests_mock.add(responses.GET, digital_url, body="NOT JSON")
    series = Series("BTC", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.ResponseParsingError) as excinfo:
        src.fetch(series)
    assert "while parsing data" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_unexpected_json(
    src, type, physical_list_ok, digital_list_ok, requests_mock
):
    """Valid JSON without the expected digital-series keys raises ResponseParsingError."""
    requests_mock.add(responses.GET, digital_url, body='{"notdata": []}')
    series = Series("BTC", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.ResponseParsingError) as excinfo:
        src.fetch(series)
    assert "Unexpected content" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_digital_rate_limit(
    src, type, physical_list_ok, digital_list_ok, requests_mock
):
    """A rate-limit notice in the digital response body raises RateLimit."""
    requests_mock.add(responses.GET, digital_url, body=rate_limit_json)
    series = Series("BTC", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.RateLimit) as excinfo:
        src.fetch(series)
    assert "rate limit" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_bad_pair_quote_non_physical(src, type, physical_list_ok):
    """A non-physical quote currency raises InvalidPair."""
    series = Series("EUR", "BTC", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.InvalidPair) as excinfo:
        src.fetch(series)
    assert "quote must be a physical currency" in str(excinfo.value)
|
||||
|
||||
|
||||
def test_fetch_api_key_missing(src, type, physical_list_ok, monkeypatch):
    """Fetching without the API key environment variable raises CredentialsError."""
    monkeypatch.delenv(api_key_name)
    series = Series("EUR", "AUD", type, "2021-01-04", "2021-01-08")
    with pytest.raises(exceptions.CredentialsError) as excinfo:
        src.fetch(series)
    assert "unavailable or invalid" in str(excinfo.value)
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Daily Prices and Volumes for Digital Currency",
|
||||
"2. Digital Currency Code": "BTC",
|
||||
"3. Digital Currency Name": "Bitcoin",
|
||||
"4. Market Code": "AUD",
|
||||
"5. Market Name": "Australian Dollar",
|
||||
"6. Last Refreshed": "2021-07-28 00:00:00",
|
||||
"7. Time Zone": "UTC"
|
||||
},
|
||||
"Time Series (Digital Currency Daily)": {
|
||||
"2021-01-09": {
|
||||
"1a. open (AUD)": "55074.06950240",
|
||||
"1b. open (USD)": "40586.96000000",
|
||||
"2a. high (AUD)": "56150.17720000",
|
||||
"2b. high (USD)": "41380.00000000",
|
||||
"3a. low (AUD)": "52540.71680000",
|
||||
"3b. low (USD)": "38720.00000000",
|
||||
"4a. close (AUD)": "54397.30924680",
|
||||
"4b. close (USD)": "40088.22000000",
|
||||
"5. volume": "75785.97967500",
|
||||
"6. market cap (USD)": "75785.97967500"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1a. open (AUD)": "53507.50941120",
|
||||
"1b. open (USD)": "39432.48000000",
|
||||
"2a. high (AUD)": "56923.63300000",
|
||||
"2b. high (USD)": "41950.00000000",
|
||||
"3a. low (AUD)": "49528.31000000",
|
||||
"3b. low (USD)": "36500.00000000",
|
||||
"4a. close (AUD)": "55068.43820140",
|
||||
"4b. close (USD)": "40582.81000000",
|
||||
"5. volume": "139789.95749900",
|
||||
"6. market cap (USD)": "139789.95749900"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1a. open (AUD)": "49893.81535840",
|
||||
"1b. open (USD)": "36769.36000000",
|
||||
"2a. high (AUD)": "54772.88310000",
|
||||
"2b. high (USD)": "40365.00000000",
|
||||
"3a. low (AUD)": "49256.92200000",
|
||||
"3b. low (USD)": "36300.00000000",
|
||||
"4a. close (AUD)": "53507.23802320",
|
||||
"4b. close (USD)": "39432.28000000",
|
||||
"5. volume": "132825.70043700",
|
||||
"6. market cap (USD)": "132825.70043700"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1a. open (AUD)": "46067.47523820",
|
||||
"1b. open (USD)": "33949.53000000",
|
||||
"2a. high (AUD)": "50124.29161740",
|
||||
"2b. high (USD)": "36939.21000000",
|
||||
"3a. low (AUD)": "45169.81872000",
|
||||
"3b. low (USD)": "33288.00000000",
|
||||
"4a. close (AUD)": "49893.81535840",
|
||||
"4b. close (USD)": "36769.36000000",
|
||||
"5. volume": "127139.20131000",
|
||||
"6. market cap (USD)": "127139.20131000"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1a. open (AUD)": "43408.17136500",
|
||||
"1b. open (USD)": "31989.75000000",
|
||||
"2a. high (AUD)": "46624.45840000",
|
||||
"2b. high (USD)": "34360.00000000",
|
||||
"3a. low (AUD)": "40572.50600000",
|
||||
"3b. low (USD)": "29900.00000000",
|
||||
"4a. close (AUD)": "46067.47523820",
|
||||
"4b. close (USD)": "33949.53000000",
|
||||
"5. volume": "116049.99703800",
|
||||
"6. market cap (USD)": "116049.99703800"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1a. open (AUD)": "44779.08784700",
|
||||
"1b. open (USD)": "33000.05000000",
|
||||
"2a. high (AUD)": "45593.18400000",
|
||||
"2b. high (USD)": "33600.00000000",
|
||||
"3a. low (AUD)": "38170.72220000",
|
||||
"3b. low (USD)": "28130.00000000",
|
||||
"4a. close (AUD)": "43406.76014740",
|
||||
"4b. close (USD)": "31988.71000000",
|
||||
"5. volume": "140899.88569000",
|
||||
"6. market cap (USD)": "140899.88569000"
|
||||
},
|
||||
"2021-01-03": {
|
||||
"1a. open (AUD)": "43661.51206300",
|
||||
"1b. open (USD)": "32176.45000000",
|
||||
"2a. high (AUD)": "47191.80858340",
|
||||
"2b. high (USD)": "34778.11000000",
|
||||
"3a. low (AUD)": "43371.85965060",
|
||||
"3b. low (USD)": "31962.99000000",
|
||||
"4a. close (AUD)": "44779.08784700",
|
||||
"4b. close (USD)": "33000.05000000",
|
||||
"5. volume": "120957.56675000",
|
||||
"6. market cap (USD)": "120957.56675000"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,3 @@
|
|||
currency code,currency name
|
||||
BTC,Bitcoin
|
||||
ETH,Ethereum
|
|
|
@ -0,0 +1,60 @@
|
|||
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Forex Daily Prices (open, high, low, close)",
|
||||
"2. From Symbol": "EUR",
|
||||
"3. To Symbol": "AUD",
|
||||
"4. Output Size": "Full size",
|
||||
"5. Last Refreshed": "2021-07-27 11:35:00",
|
||||
"6. Time Zone": "UTC"
|
||||
},
|
||||
"Time Series FX (Daily)": {
|
||||
"2021-01-11": {
|
||||
"1. open": "1.57496",
|
||||
"2. high": "1.58318",
|
||||
"3. low": "1.57290",
|
||||
"4. close": "1.57823"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "1.57879",
|
||||
"2. high": "1.58140",
|
||||
"3. low": "1.57177",
|
||||
"4. close": "1.57350"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "1.57901",
|
||||
"2. high": "1.58650",
|
||||
"3. low": "1.57757",
|
||||
"4. close": "1.57893"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "1.58390",
|
||||
"2. high": "1.58800",
|
||||
"3. low": "1.57640",
|
||||
"4. close": "1.57932"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "1.59698",
|
||||
"2. high": "1.59886",
|
||||
"3. low": "1.58100",
|
||||
"4. close": "1.58389"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "1.58741",
|
||||
"2. high": "1.60296",
|
||||
"3. low": "1.58550",
|
||||
"4. close": "1.59718"
|
||||
},
|
||||
"2021-01-01": {
|
||||
"1. open": "1.58730",
|
||||
"2. high": "1.58730",
|
||||
"3. low": "1.58504",
|
||||
"4. close": "1.58668"
|
||||
},
|
||||
"2020-12-31": {
|
||||
"1. open": "1.59946",
|
||||
"2. high": "1.60138",
|
||||
"3. low": "1.58230",
|
||||
"4. close": "1.58730"
|
||||
}
|
||||
}
|
||||
}
|
81
tests/pricehist/sources/test_alphavantage/ibm-partial.json
Normal file
81
tests/pricehist/sources/test_alphavantage/ibm-partial.json
Normal file
|
@ -0,0 +1,81 @@
|
|||
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Daily Time Series with Splits and Dividend Events",
|
||||
"2. Symbol": "IBM",
|
||||
"3. Last Refreshed": "2021-07-20",
|
||||
"4. Output Size": "Full size",
|
||||
"5. Time Zone": "US/Eastern"
|
||||
},
|
||||
"Time Series (Daily)": {
|
||||
"2021-01-11": {
|
||||
"1. open": "127.95",
|
||||
"2. high": "129.675",
|
||||
"3. low": "127.66",
|
||||
"4. close": "128.58",
|
||||
"5. adjusted close": "125.471469081",
|
||||
"6. volume": "5602466",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "128.57",
|
||||
"2. high": "129.32",
|
||||
"3. low": "126.98",
|
||||
"4. close": "128.53",
|
||||
"5. adjusted close": "125.422677873",
|
||||
"6. volume": "4676487",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "130.04",
|
||||
"2. high": "130.46",
|
||||
"3. low": "128.26",
|
||||
"4. close": "128.99",
|
||||
"5. adjusted close": "125.871556982",
|
||||
"6. volume": "4507382",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "126.9",
|
||||
"2. high": "131.88",
|
||||
"3. low": "126.72",
|
||||
"4. close": "129.29",
|
||||
"5. adjusted close": "126.164304226",
|
||||
"6. volume": "7956740",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "125.01",
|
||||
"2. high": "126.68",
|
||||
"3. low": "124.61",
|
||||
"4. close": "126.14",
|
||||
"5. adjusted close": "123.090458157",
|
||||
"6. volume": "6114619",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "125.85",
|
||||
"2. high": "125.9174",
|
||||
"3. low": "123.04",
|
||||
"4. close": "123.94",
|
||||
"5. adjusted close": "120.943645029",
|
||||
"6. volume": "5179161",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2020-12-31": {
|
||||
"1. open": "124.22",
|
||||
"2. high": "126.03",
|
||||
"3. low": "123.99",
|
||||
"4. close": "125.88",
|
||||
"5. adjusted close": "122.836743878",
|
||||
"6. volume": "3574696",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
currency code,currency name
|
||||
AUD,Australian Dollar
|
||||
EUR,Euro
|
||||
USD,United States Dollar
|
|
114
tests/pricehist/sources/test_alphavantage/search-ibm.json
Normal file
114
tests/pricehist/sources/test_alphavantage/search-ibm.json
Normal file
|
@ -0,0 +1,114 @@
|
|||
{
|
||||
"bestMatches": [
|
||||
{
|
||||
"1. symbol": "IBM",
|
||||
"2. name": "International Business Machines Corp",
|
||||
"3. type": "Equity",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "1.0000"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMJ",
|
||||
"2. name": "iShares iBonds Dec 2021 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMK",
|
||||
"2. name": "iShares iBonds Dec 2022 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBML",
|
||||
"2. name": "iShares iBonds Dec 2023 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMM",
|
||||
"2. name": "iShares iBonds Dec 2024 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMN",
|
||||
"2. name": "iShares iBonds Dec 2025 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMO",
|
||||
"2. name": "iShares iBonds Dec 2026 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.FRK",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "Frankfurt",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "20:00",
|
||||
"7. timezone": "UTC+02",
|
||||
"8. currency": "EUR",
|
||||
"9. matchScore": "0.7500"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.LON",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "United Kingdom",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "16:30",
|
||||
"7. timezone": "UTC+01",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.7500"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.DEX",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "XETRA",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "20:00",
|
||||
"7. timezone": "UTC+02",
|
||||
"8. currency": "EUR",
|
||||
"9. matchScore": "0.6667"
|
||||
}
|
||||
]
|
||||
}
|
Loading…
Add table
Reference in a new issue