Compare commits
172 commits
Commit SHA1s (author and date columns not preserved):

3aa09084ed ab507b189c 53f39a26ef dffe6f8e89 c78154df3a 1164724ffb 77b2776e55 ee8ca0573d
b6f4c17530 5e75759b0f 59574e9156 51e297b752 b7d0d739ab e8dec0bf64 1e1003994c 4cfee667c3
9eb6de4c44 9dd6121d4d 5fdf16edb7 5a0de59aba 8921653154 b8c4554298 47544a11b6 a12f3d3899
86e178ea96 1f01c54c4d f4aee18360 0b377a8d65 733c849286 96d3e44738 6519cf2845 04936c5cd6
46dfd876ea 06c2876152 ffeebe5ffa 2b0f01110a 786ddd3c8c bd3489ea71 2b8460ff4b b7b2862b77
34c503f6cb 7f4ed2f8b5 b99e71202a 2398b8340f dfaf1b2d93 b522a0961c 71ed246956 3c290abb95
d6036c9d14 09fbeb79cb 582bf952e0 4b524960b8 2d2b4b1e02 42d969a3ba 765e2ec77d a54da85a6f
aabce7fe6f a3e19f9bcf 3f65a21ffd bbf33df657 dace604129 99aeb6bbc7 46ebdfe074 7a9d3d3e8f
aceb0f09d1 5f2b96a5bb 66c9f42ef8 486d4097d7 0d7b813c6c 2787c212d2 947eaacd29 039d7fb809
18af75ae68 a1b87c36f5 afd41da6ef 249ea0b2db 2c7ac5f084 15a39bb8a0 2249917494 336b2c3461
77a77e76c8 38beaef3be b2a5b4c5c9 b9bd3d694d c012af3881 7325ff6187 7becc4c0c5 216ab19385
65f8836153 89e8bc9964 1468e1f64b 7b53204bcf ca63a435bd 799aaf37cc 1430ce97f7 98d71392c2
ed44502def c977286153 fc2561b19e 6708800699 f501cc8f9a b5cfdaad1e fecd61f5f8 a4c0a142e3
c3a186fe49 74d9c211c9 d19bc66c61 637650245e 9050b7948b 78b622eade 5d0c6aaf03 2d1553e2d9
c0af189ae0 0ddaf5893b 66e95c1ac4 784770d9f3 338acf2970 16d0405725 f21636c397 c8337b9c2c
3218338bff de4a8f2227 5053d57fec 944265a7e9 bc482957ec 4f6dafcbeb 1124b6f86c c13e329208
09b7a25f9d c912b676b4 582b9fe178 2e12167dac 260fc428f5 96315b1b00 a3709926e4 d3931310a2
99761ebbd1 81291cbf2b b0834575ed 46db6e9a6f cdd78f0445 c4afbb5ec0 591bb33cd3 b868602cae
d1704615df 881f3b2acf b00bca3903 73c5f3a1c6 fadeee4870 d506b8502e ab0c0bdca7 ebb644fb8f
633e84ef22 fac396d00c fcce842d82 c0c7e546a3 2aa4319dbb 6a794cdc6f 360138ee0f 494a721ccd
23be58ae74 2b83a6d1f0 15aa62a024 2d2b80da49 ba67ef310f 98355efbc3 0927e27939 7d473c7dba
03386e9093 7dee55fa17 2f3578c989 ea4cc7ebe7
95 changed files with 8982 additions and 823 deletions
.gitignore (vendored): 3

```
@@ -1,3 +1,6 @@
.pytest_cache
.vimrc
dist/
.coverage
htmlcov/
.tox/
```
.gitlab-ci.yml (Normal file): 33

```
@@ -0,0 +1,33 @@
image: python:latest

variables:
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
  POETRY_CACHE_DIR: "$CI_PROJECT_DIR/.cache/poetry"

cache:
  paths:
    - .cache/pip
    - .cache/poetry

before_script:
  - python -V
  - pip install poetry
  - poetry install

pre-commit:
  script:
    - make pre-commit

test:
  script:
    - poetry run pytest

test-live:
  script:
    - tests/live.sh

coverage:
  script:
    - poetry run coverage run --source=pricehist -m pytest
    - poetry run coverage report
  coverage: '/^TOTAL.+?(\d+\%)$/'
```
Makefile: 42

```
@@ -2,13 +2,47 @@
help: ## List make targets
    @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

lint: ## Lint source code
    poetry run flake8

.PHONY: format
format: ## Format source code
    poetry run isort .
    poetry run black .

.PHONY: lint
lint: ## Lint source code
    poetry run flake8 src tests

.PHONY: test
test: ## Run tests
    poetry run pytest
    poetry run pytest --color=yes

.PHONY: test-live
test-live: ## Run live tests
    tests/live.sh

.PHONY: coverage
coverage: ## Generate and open coverage report
    poetry run coverage run --source=pricehist -m pytest
    poetry run coverage html
    xdg-open htmlcov/index.html

.PHONY: install-pre-commit-hook
install-pre-commit-hook: ## Install the git pre-commit hook
    echo -e "#!/bin/bash\nmake pre-commit" > .git/hooks/pre-commit
    chmod +x .git/hooks/pre-commit

.PHONY: pre-commit
pre-commit: ## Checks to run before each commit
    poetry run isort src tests --check
    poetry run black src tests --check
    poetry run flake8 src tests

.PHONY: tox
tox: ## Run tests via tox
    poetry run tox

.PHONY: fetch-iso-data
fetch-iso-data: ## Fetch the latest copy of the ISO 4217 currency data
    wget -O src/pricehist/resources/list-one.xml \
        https://www.six-group.com/dam/download/financial-information/data-center/iso-currrency/lists/list-one.xml
    wget -O src/pricehist/resources/list-three.xml \
        https://www.six-group.com/dam/download/financial-information/data-center/iso-currrency/lists/list-three.xml
```
README.md: 461

@@ -3,17 +3,28 @@
A command-line tool for fetching and formatting historical price data, with
support for multiple data sources and output formats.

[](https://gitlab.com/chrisberkhout/pricehist/-/commits/master)
[](https://gitlab.com/chrisberkhout/pricehist/-/commits/master)
[](https://badge.fury.io/py/pricehist)
[](https://pepy.tech/project/pricehist)
[](https://gitlab.com/chrisberkhout/pricehist/-/blob/master/LICENSE)
[](https://github.com/psf/black)
[](https://hits.seeyoufarm.com)

## Installation

Install via [pipx](https://pypa.github.io/pipx/):
Install via pip or
[pipx](https://pypa.github.io/pipx/).

```bash
```
pipx install pricehist
```

## Sources

- **`alphavantage`**: [Alpha Vantage](https://www.alphavantage.co/)
- **`bankofcanada`**: [Bank of Canada daily exchange rates](https://www.bankofcanada.ca/valet/docs)
- **`coinbasepro`**: [Coinbase Pro](https://pro.coinbase.com/)
- **`coindesk`**: [CoinDesk Bitcoin Price Index](https://www.coindesk.com/coindesk-api)
- **`coinmarketcap`**: [CoinMarketCap](https://coinmarketcap.com/)
- **`ecb`**: [European Central Bank Euro foreign exchange reference rates](https://www.ecb.europa.eu/stats/exchange/eurofxref/html/index.en.html)

@@ -23,55 +34,103 @@ pipx install pricehist

- **`beancount`**: [Beancount](http://furius.ca/beancount/)
- **`csv`**: [Comma-separated values](https://en.wikipedia.org/wiki/Comma-separated_values)
- **`json`**: [JSON](https://en.wikipedia.org/wiki/JSON)
- **`jsonl`**: [JSON lines](https://en.wikipedia.org/wiki/JSON_streaming)
- **`gnucash-sql`**: [GnuCash](https://www.gnucash.org/) SQL
- **`ledger`**: [Ledger](https://www.ledger-cli.org/) and [hledger](https://hledger.org/)

## Examples
## Reactions

Show usage information:
> This is my new favourite price fetcher, by far.
> -- _Simon Michael, creator of [hledger](https://hledger.org/) ([ref](https://groups.google.com/g/hledger/c/SCLbNiKl9D8/m/0ReYmDppAAAJ))_

> This is great!
> -- _Martin Blais, creator of [Beancount](https://beancount.github.io/) ([ref](https://groups.google.com/g/beancount/c/cCJc9OhIlNg/m/QGRvNowcAwAJ))_

## How to

### Fetch prices

Fetch prices by choosing a source, a pair and, optionally, a time interval.

```bash
pricehist -h
```
```
usage: pricehist [-h] [--version] [--verbose] {sources,source,fetch} ...

Fetch historical price data

optional arguments:
  -h, --help            show this help message and exit
  --version             show version information
  --verbose             show all log messages

commands:
  {sources,source,fetch}
    sources             list sources
    source              show source details
    fetch               fetch prices
```

Fetch prices after 2021-01-04, ending 2021-01-15, as CSV:

```bash
pricehist fetch ecb EUR/AUD -sx 2021-01-04 -e 2021-01-15 -o csv
pricehist fetch ecb EUR/AUD -s 2021-01-04 -e 2021-01-08
```
```
date,base,quote,amount,source,type
2021-01-04,EUR,AUD,1.5928,ecb,reference
2021-01-05,EUR,AUD,1.5927,ecb,reference
2021-01-06,EUR,AUD,1.5824,ecb,reference
2021-01-07,EUR,AUD,1.5836,ecb,reference
2021-01-08,EUR,AUD,1.5758,ecb,reference
2021-01-11,EUR,AUD,1.5783,ecb,reference
2021-01-12,EUR,AUD,1.5742,ecb,reference
2021-01-13,EUR,AUD,1.5734,ecb,reference
2021-01-14,EUR,AUD,1.5642,ecb,reference
2021-01-15,EUR,AUD,1.568,ecb,reference
```

In Ledger format:
The default output format is CSV, which is suitable for use in spreadsheets and
with other tools. For example, you can generate a price chart from the command
line as follows (or using [an alias](https://gitlab.com/-/snippets/2163031)).

```bash
pricehist fetch ecb EUR/AUD -s 2021-01-01 -o ledger | head
```
pricehist fetch coindesk BTC/USD -s 2021-01-01 | \
  sed 1d | \
  cut -d, -f1,4 | \
  gnuplot -p -e '
    set datafile separator ",";
    set xdata time;
    set timefmt "%Y-%m-%d";
    set format x "%b\n%Y";
    plot "/dev/stdin" using 1:2 with lines title "BTC/USD"
  '
```



### Show usage information

Add `-h` to any command to see usage information.

```
pricehist fetch -h
```
```
usage: pricehist fetch SOURCE PAIR [-h] [-vvv] [-t TYPE] [-s DATE | -sx DATE] [-e DATE | -ex DATE]
                       [-o beancount|csv|json|jsonl|gnucash-sql|ledger] [--invert] [--quantize INT]
                       [--fmt-base SYM] [--fmt-quote SYM] [--fmt-time TIME] [--fmt-decimal CHAR] [--fmt-thousands CHAR]
                       [--fmt-symbol rightspace|right|leftspace|left] [--fmt-datesep CHAR]
                       [--fmt-csvdelim CHAR] [--fmt-jsonnums]

positional arguments:
  SOURCE                   the source identifier
  PAIR                     pair, usually BASE/QUOTE, e.g. BTC/USD

optional arguments:
  -h, --help               show this help message and exit
  -vvv, --verbose          show all log messages
  -t TYPE, --type TYPE     price type, e.g. close
  -s DATE, --start DATE    start date, inclusive (default: source start)
  -sx DATE, --startx DATE  start date, exclusive
  -e DATE, --end DATE      end date, inclusive (default: today)
  -ex DATE, --endx DATE    end date, exclusive
  -o FMT, --output FMT     output format (default: csv)
  --invert                 invert the price, swapping base and quote
  --quantize INT           round to the given number of decimal places
  --fmt-base SYM           rename the base symbol in output
  --fmt-quote SYM          rename the quote symbol in output
  --fmt-time TIME          set a particular time of day in output (default: 00:00:00)
  --fmt-decimal CHAR       decimal point in output (default: '.')
  --fmt-thousands CHAR     thousands separator in output (default: '')
  --fmt-symbol LOCATION    commodity symbol placement in output (default: rightspace)
  --fmt-datesep CHAR       date separator in output (default: '-')
  --fmt-csvdelim CHAR      field delimiter for CSV output (default: ',')
  --fmt-jsonnums           numbers not strings for JSON output (default: False)
```

### Choose and customize the output format

As the output format you can choose one of `beancount`, `csv`, `json`, `jsonl`,
`ledger` or `gnucash-sql`.

```
pricehist fetch ecb EUR/AUD -s 2021-01-04 -e 2021-01-08 -o ledger
```
```
P 2021-01-04 00:00:00 EUR 1.5928 AUD
@@ -79,46 +138,324 @@ P 2021-01-05 00:00:00 EUR 1.5927 AUD
P 2021-01-06 00:00:00 EUR 1.5824 AUD
P 2021-01-07 00:00:00 EUR 1.5836 AUD
P 2021-01-08 00:00:00 EUR 1.5758 AUD
P 2021-01-11 00:00:00 EUR 1.5783 AUD
P 2021-01-12 00:00:00 EUR 1.5742 AUD
P 2021-01-13 00:00:00 EUR 1.5734 AUD
P 2021-01-14 00:00:00 EUR 1.5642 AUD
P 2021-01-15 00:00:00 EUR 1.568 AUD
```

Generate SQL for a GnuCash database and apply it immediately:
Formatting options let you control certain details of the output.

```bash
```
pricehist fetch ecb EUR/AUD -s 2021-01-04 -e 2021-01-08 -o ledger \
  --fmt-time '' --fmt-datesep / --fmt-base € --fmt-quote $ --fmt-symbol left
```
```
P 2021/01/04 € $1.5928
P 2021/01/05 € $1.5927
P 2021/01/06 € $1.5824
P 2021/01/07 € $1.5836
P 2021/01/08 € $1.5758
```

### Fetch new prices only

You can update an existing file without refetching the prices you already have.
First find the date of the last price, then fetch from there, drop the header
line if present and append the rest to the existing file.

```
last=$(tail -1 prices-eur-usd.csv | cut -d, -f1)
pricehist fetch ecb EUR/USD -sx $last -o csv | sed 1d >> prices-eur-usd.csv
```

### Load prices into GnuCash

You can generate SQL for a GnuCash database and apply it immediately with one
of the following commands.

```
pricehist fetch ecb EUR/AUD -s 2021-01-01 -o gnucash-sql | sqlite3 Accounts.gnucash
pricehist fetch ecb EUR/AUD -s 2021-01-01 -o gnucash-sql | mysql -u username -p -D databasename
pricehist fetch ecb EUR/AUD -s 2021-01-01 -o gnucash-sql | psql -U username -d databasename -v ON_ERROR_STOP=1
```

## Design choices
Beware that the GnuCash project itself does not support integration at the
database level, so there is a risk that the SQL generated by `pricehist` will
be ineffective or even damaging for some version of GnuCash. In practice, this
strategy has been used successfully by other projects. Reading the SQL and
keeping regular database backups is recommended.

To keep things simple, at least for now, `pricehist` provides only univariate
time series of daily historical prices. It doesn't provide other types of
market, financial or economic data, real-time prices, or other temporal
resolutions. Multiple or multivariate series require multiple invocations.
The GnuCash database must already contain commodities with mnemonics matching
the base and quote of new prices, otherwise the SQL will fail without making
changes.

Each price entry is given a GUID based on its content (date, base, quote,
source, type and amount) and existing GUIDs are skipped in the final insert, so
you can apply identical or overlapping SQL files multiple times without
creating duplicate entries in the database.
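
A minimal sketch of how such a content-based GUID can be produced. The field
order, separator and hash function here are assumptions for illustration, not
necessarily what `pricehist` itself uses.

```python
import hashlib

def price_guid(date, base, quote, source, type, amount):
    # Hash the price's content so identical rows always get identical GUIDs.
    # Illustrative only: pricehist's actual field order and hash may differ.
    content = "|".join([date, base, quote, source, type, str(amount)])
    return hashlib.sha256(content.encode("utf-8")).hexdigest()[:32]

# Re-running the generated SQL reproduces the same GUID, so the insert can
# skip rows that already exist instead of creating duplicates.
print(price_guid("2021-01-04", "EUR", "AUD", "ecb", "reference", "1.5928"))
```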

### Show source information

The `source` command shows information about a source.

```
pricehist source alphavantage
```
```
ID          : alphavantage
Name        : Alpha Vantage
Description : Provider of market data for stocks, forex and cryptocurrencies
URL         : https://www.alphavantage.co/
Start       : 1995-01-01
Types       : close, open, high, low, adjclose, mid
Notes       : Alpha Vantage has data on...
```

Available symbols can be listed for most sources, either as full pairs or as
separate base and quote symbols that will work in certain combinations.

```
pricehist source ecb --symbols
```
```
EUR/AUD    Euro against Australian Dollar
EUR/BGN    Euro against Bulgarian Lev
EUR/BRL    Euro against Brazilian Real
EUR/CAD    Euro against Canadian Dollar
EUR/CHF    Euro against Swiss Franc
...
```

It may also be possible to search for symbols.

```
pricehist source alphavantage --search Tesla
```
```
TL0.DEX       Tesla, Equity, XETRA, EUR
TL0.FRK       Tesla, Equity, Frankfurt, EUR
TSLA34.SAO    Tesla, Equity, Brazil/Sao Paolo, BRL
TSLA          Tesla Inc, Equity, United States, USD
TXLZF         Tesla Exploration Ltd, Equity, United States, USD
```

### Inspect source interactions

You can see extra information by adding the verbose option (`--verbose` or
`-vvv`), including `curl` commands that reproduce each request to a source.

```
pricehist fetch coindesk BTC/USD -s 2021-01-01 -e 2021-01-05 -vvv
```
```
DEBUG Began pricehist run at 2021-08-12 14:38:26.630357.
DEBUG Starting new HTTPS connection (1): api.coindesk.com:443
DEBUG https://api.coindesk.com:443 "GET /v1/bpi/historical/close.json?currency=USD&start=2021-01-01&end=2021-01-05 HTTP/1.1" 200 319
DEBUG curl -X GET -H 'Accept: */*' -H 'Accept-Encoding: gzip, deflate' -H 'Connection: keep-alive' -H 'User-Agent: python-requests/2.25.1' --compressed 'https://api.coindesk.com/v1/bpi/historical/close.json?currency=USD&start=2021-01-01&end=2021-01-05'
DEBUG Available data covers the interval [2021-01-01--2021-01-05], as requested.
date,base,quote,amount,source,type
2021-01-01,BTC,USD,29391.775,coindesk,close
2021-01-02,BTC,USD,32198.48,coindesk,close
2021-01-03,BTC,USD,33033.62,coindesk,close
2021-01-04,BTC,USD,32017.565,coindesk,close
2021-01-05,BTC,USD,34035.0067,coindesk,close
DEBUG Ended pricehist run at 2021-08-12 14:38:26.709428.
```

Running a logged `curl` command shows exactly what data is returned by the
source.

```
pricehist fetch coindesk BTC/USD -s 2021-01-01 -e 2021-01-05 -vvv 2>&1 \
  | grep '^DEBUG curl' | sed 's/^DEBUG //' | bash | jq .
```
```json
{
  "bpi": {
    "2021-01-01": 29391.775,
    "2021-01-02": 32198.48,
    "2021-01-03": 33033.62,
    "2021-01-04": 32017.565,
    "2021-01-05": 34035.0067
  },
  "disclaimer": "This data was produced from the CoinDesk Bitcoin Price Index. BPI value data returned as USD.",
  "time": {
    "updated": "Jan 6, 2021 00:03:00 UTC",
    "updatedISO": "2021-01-06T00:03:00+00:00"
  }
}
```

### Use via `bean-price`

Beancount users may wish to use `pricehist` sources via `bean-price`. To do so,
ensure the `pricehist` package is installed in an accessible location.

You can fetch the latest price directly from the command line.

```
bean-price -e "USD:pricehist.beanprice.coindesk/BTC:USD"
```
```
2021-08-18 price BTC:USD 44725.12 USD
```

You can fetch a series of prices by providing a Beancount file as input.

```
; input.beancount
2021-08-14 commodity BTC
  price: "USD:pricehist.beanprice.coindesk/BTC:USD:close"
```

```
bean-price input.beancount --update --update-rate daily --inactive --clear-cache
```
```
2021-08-14 price BTC 47098.2633 USD
2021-08-15 price BTC 47018.9017 USD
2021-08-16 price BTC 45927.405 USD
2021-08-17 price BTC 44686.3333 USD
2021-08-18 price BTC 44725.12 USD
```

Adding `-v` will print progress information, `-vv` will print debug information,
including that from `pricehist`.

A source map specification for `bean-price` has the form
`<currency>:<module>/[^]<ticker>`. Additional `<module>/[^]<ticker>` parts can
be appended, separated by commas.

The module name will be of the form `pricehist.beanprice.<source_id>`.

The ticker symbol will be of the form `BASE:QUOTE:TYPE`.

Any non-alphanumeric characters except the equals sign (`=`), hyphen (`-`),
period (`.`), or parentheses (`(` or `)`) are special characters that need to
be encoded as their two-digit hexadecimal code prefixed with an underscore,
because `bean-price` ticker symbols don't allow all the characters used by
`pricehist` pairs.
[This page](https://replit.com/@chrisberkhout/bpticker) will do it for you.

For example, the Yahoo! Finance symbol for the Dow Jones Industrial Average is
`^DJI`, and would have the source map specification
`USD:pricehist.beanprice.yahoo/_5eDJI`, or for the daily high price
`USD:pricehist.beanprice.yahoo/_5eDJI::high`.
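
A small sketch of that encoding, mirroring the `_XX` convention described
above. The helper name is made up for illustration; `pricehist` itself only
needs to perform the reverse mapping when decoding tickers.

```python
import re

def encode_symbol(symbol: str) -> str:
    # Replace every character that bean-price tickers don't allow (anything
    # other than alphanumerics, "=", "-", ".", "(" and ")") with an underscore
    # followed by its two-digit hex code, e.g. "^" -> "_5e".
    return re.sub(
        r"[^A-Za-z0-9=.\-()]",
        lambda m: "_%02x" % ord(m.group(0)),
        symbol,
    )

print(encode_symbol("^DJI"))  # _5eDJI, as in the example above
```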

### Use as a library

You may find `pricehist`'s source classes useful in your own scripts.

```
$ python
Python 3.9.6 (default, Jun 30 2021, 10:22:16)
[GCC 11.1.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> from pricehist.series import Series
>>> from pricehist.sources.ecb import ECB
>>> series = ECB().fetch(Series("EUR", "AUD", "reference", "2021-01-04", "2021-01-08"))
>>> series.prices
[Price(date='2021-01-04', amount=Decimal('1.5928')), Price(date='2021-01-05', amount=Decimal('1.5927')), Price(date='2021-01-06', amount=Decimal('1.5824')), Price(date='2021-01-07', amount=Decimal('1.5836')), Price(date='2021-01-08', amount=Decimal('1.5758'))]
```

A subclass of `pricehist.exceptions.SourceError` will be raised for any error.
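
So a script can wrap the fetch in a single `except` clause; a short sketch
using the same classes as the session above:

```python
from pricehist import exceptions
from pricehist.series import Series
from pricehist.sources.ecb import ECB

try:
    series = ECB().fetch(Series("EUR", "AUD", "reference", "2021-01-04", "2021-01-08"))
except exceptions.SourceError as e:
    # InvalidPair, InvalidType, RateLimit, RequestError and friends all
    # subclass SourceError, so one handler covers them.
    print(f"fetch failed: {e}")
else:
    for price in series.prices:
        print(price.date, price.amount)
```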

### Contribute

Contributions are welcome! If you discover a bug or want to work on a
non-trivial change, please open a
[GitLab issue](https://gitlab.com/chrisberkhout/pricehist/-/issues)
to discuss it.

Run `make install-pre-commit-hook` to set up local pre-commit checks.
Set up your editor to run
[isort](https://pycqa.github.io/isort/),
[Black](https://black.readthedocs.io/en/stable/) and
[Flake8](https://flake8.pycqa.org/en/latest/),
or run them manually via `make format lint`.

## Terminology

A **source** is an upstream service that can provide a series of prices.

Each **series** of prices is for one pair and price type.

The [**pair**](https://en.wikipedia.org/wiki/Currency_pair) is made up of a
base and a quote, each given as a symbol. Sometimes you will give the base
only, and the quote will be determined with information from the source. The
available pairs, the symbols used in them and the available price types all
depend on the particular source used.

The **base** is the currency or commodity being valued. Each price expresses
the value of one unit of the base.

The **quote** is the unit used to express the value of the base.

A **symbol** is a code or abbreviation for a currency or commodity.

The **prices** in a series each have a date and an amount.

The **amount** is the number of units of the quote that are equal to one unit
of the base.

Consider the following command.

```
pricehist fetch coindesk BTC/USD --type close
```

- **`coindesk`** is the ID of the CoinDesk Bitcoin Price Index source.
- **`BTC`** is the symbol for Bitcoin, used here as the base.
- **`USD`** is the symbol for the United States Dollar, used here as the quote.
- **`BTC/USD`** is the pair Bitcoin against United States Dollar.
- **`close`** is the price type for the last price of each day.

A BTC/USD price of the amount 29,391.775 can be written as
"BTC/USD = 29391.775" or "BTC 29391.775 USD", and means that one Bitcoin is
worth 29,391.775 United States Dollars.
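
The `--invert` option swaps the two sides of such a price. The arithmetic is
simply the reciprocal of the amount; for example (rounding here is only for
display):

```python
from decimal import Decimal

btc_usd = Decimal("29391.775")   # BTC/USD: US Dollars per Bitcoin
usd_btc = Decimal(1) / btc_usd   # USD/BTC: Bitcoin per US Dollar
print(round(usd_btc, 10))        # 0.0000340231
```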

## Initial design choices

To keep things simple, `pricehist` provides only univariate time series of
daily historical prices. It doesn't provide other types of market, financial or
economic data, real-time prices, or other temporal resolutions. Multiple or
multivariate series require multiple invocations.

## Potential features

In the future, `pricehist` may be extended to cover some of the following
features:

- **Time of day**: Sources sometimes provide specific times for each day's
  high/low prices and these could be preserved for output. This would require
  changes to how dates are handled internally, clarification of time zone
  handling and extension of the time formatting option.
- **Alternate resolutions**: Some sources can provide higher or lower
  resolution data, such as hourly or weekly. These could be supported where
  available. For other cases an option could be provided for downsampling data
  before output.
- **Real-time prices**: These generally come from different source endpoints
  than the historical data. Real-time prices will usually have a different
  price type, such as `last`, `bid` or `ask`. Support for real-time prices
  would allow adding sources that don't provide historical data. Start and end
  times are irrelevant when requesting real-time prices. A "follow" option
  could continuously poll for new prices.
- **Related non-price data**: Trading volume, spreads, split and dividend
  events and other related data could be supported. The base/quote/type model
  used for prices would work for some of this. Other things may require
  extending the model.
- **Multivariate series**: Would allow, for example, fetching
  high/low/open/close prices in a single invocation.
- **`format` command**: A command for rewriting existing CSV data into one of
  the other output formats.

## Alternatives

Beancount's [`bean-price`](https://beancount.github.io/docs/fetching_prices_in_beancount.html)
tool fetches historical prices and addresses other workflow concerns in a
Beancount-specific manner.
Beancount's [`bean-price`](https://github.com/beancount/beanprice) tool fetches
prices and addresses other workflow concerns in a Beancount-specific manner,
generally requiring a Beancount file as input.

The [Piecash](https://piecash.readthedocs.io/) library is a pythonic interface
to GnuCash files stored in SQL which has a
[`Commodity.update_prices`](https://piecash.readthedocs.io/en/master/api/piecash.core.commodity.html?highlight=update_prices#piecash.core.commodity.Commodity.update_prices)
method for fetching historical prices.
The GnuCash wiki documents [wrapper scripts](https://wiki.gnucash.org/wiki/Stocks/get_prices)
for the [Finance::QuoteHist](https://metacpan.org/pod/Finance::QuoteHist) Perl
module.

Some other projects with related goals include:
* [`hledger-stockquotes`](https://github.com/prikhi/hledger-stockquotes):
  Generate an HLedger journal containing daily stock quotes for your commodities.
* [`ledger_get_prices`](https://github.com/nathankot/ledger-get-prices):
  Uses Yahoo finance to intelligently generate a ledger price database based on your current ledger commodities and time period.
* [LedgerStockUpdate](https://github.com/adchari/LedgerStockUpdate):
  Locates any stocks you have in your ledger-cli file, then generates a price database of those stocks.
* [`market-prices`](https://github.com/barrucadu/hledger-scripts#market-prices):
  Downloads market values of commodities from a few different sources.
* [price-database](https://gitlab.com/alensiljak/price-database):
  A Python library and a CLI for storage of prices.
example-gnuplot.png (BIN, Normal file): binary file not shown. After: Size 32 KiB.
poetry.lock (generated): 1029. File diff suppressed because it is too large.
```
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pricehist"
version = "0.1.1"
version = "1.4.12"
description = "Fetch and format historical price data"
authors = ["Chris Berkhout <chris@chrisberkhout.com>"]
license = "MIT"
@@ -10,20 +10,25 @@ homepage = "https://gitlab.com/chrisberkhout/pricehist"
repository = "https://gitlab.com/chrisberkhout/pricehist"
include = [
    "LICENSE",
    "example-gnuplot.png",
]

[tool.poetry.dependencies]
python = "^3.9"
python = "^3.8.1"
requests = "^2.25.1"
lxml = "^4.6.2"
lxml = "^5.1.0"
cssselect = "^1.1.0"
curlify = "^2.2.1"

[tool.poetry.dev-dependencies]
pytest = "^6.2.2"
black = "^20.8b1"
flake8 = "^3.9.1"
pytest = "^8.3.2"
black = "^22.10.0"
flake8 = "^7.1.0"
isort = "^5.8.0"
responses = "^0.13.3"
coverage = "^5.5"
pytest-mock = "^3.6.1"
tox = "^3.24.3"

[build-system]
requires = ["poetry-core>=1.0.0"]
@@ -35,3 +40,6 @@ pricehist = "pricehist.cli:cli"
[tool.isort]
profile = "black"
multi_line_output = 3

[tool.pytest.ini_options]
markers = []
```
```
@@ -1 +1 @@
__version__ = "0.1.1"
__version__ = "1.4.12"
```
src/pricehist/beanprice/__init__.py (Normal file): 77

```
@@ -0,0 +1,77 @@
import re
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal
from typing import List, NamedTuple, Optional

from pricehist import exceptions
from pricehist.series import Series

SourcePrice = NamedTuple(
    "SourcePrice",
    [
        ("price", Decimal),
        ("time", Optional[datetime]),
        ("quote_currency", Optional[str]),
    ],
)


def source(pricehist_source):
    class Source:
        def get_latest_price(self, ticker: str) -> Optional[SourcePrice]:
            time_end = datetime.combine(date.today(), datetime.min.time())
            time_begin = time_end - timedelta(days=7)
            prices = self.get_prices_series(ticker, time_begin, time_end)
            if prices:
                return prices[-1]
            else:
                return None

        def get_historical_price(
            self, ticker: str, time: datetime
        ) -> Optional[SourcePrice]:
            prices = self.get_prices_series(ticker, time, time)
            if prices:
                return prices[-1]
            else:
                return None

        def get_prices_series(
            self,
            ticker: str,
            time_begin: datetime,
            time_end: datetime,
        ) -> Optional[List[SourcePrice]]:
            base, quote, type = self._decode(ticker)

            start = time_begin.date().isoformat()
            end = time_end.date().isoformat()

            local_tz = datetime.now(timezone.utc).astimezone().tzinfo
            user_tz = time_begin.tzinfo or local_tz

            try:
                series = pricehist_source.fetch(Series(base, quote, type, start, end))
            except exceptions.SourceError:
                return None

            return [
                SourcePrice(
                    price.amount,
                    datetime.fromisoformat(price.date).replace(tzinfo=user_tz),
                    series.quote,
                )
                for price in series.prices
            ]

        def _decode(self, ticker):
            # https://github.com/beancount/beanprice/blob/b05203/beanprice/price.py#L166
            parts = [
                re.sub(r"_[0-9a-fA-F]{2}", lambda m: chr(int(m.group(0)[1:], 16)), part)
                for part in ticker.split(":")
            ]
            base, quote, candidate_type = (parts + [""] * 3)[0:3]
            type = candidate_type or pricehist_source.types()[0]
            return (base, quote, type)

    return Source
```
src/pricehist/beanprice/alphavantage.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.alphavantage import AlphaVantage

Source = beanprice.source(AlphaVantage())
```

src/pricehist/beanprice/bankofcanada.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.bankofcanada import BankOfCanada

Source = beanprice.source(BankOfCanada())
```

src/pricehist/beanprice/coinbasepro.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.coinbasepro import CoinbasePro

Source = beanprice.source(CoinbasePro())
```

src/pricehist/beanprice/coindesk.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.coindesk import CoinDesk

Source = beanprice.source(CoinDesk())
```

src/pricehist/beanprice/coinmarketcap.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.coinmarketcap import CoinMarketCap

Source = beanprice.source(CoinMarketCap())
```

src/pricehist/beanprice/ecb.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.ecb import ECB

Source = beanprice.source(ECB())
```

src/pricehist/beanprice/exchangeratehost.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.exchangeratehost import ExchangeRateHost

Source = beanprice.source(ExchangeRateHost())
```

src/pricehist/beanprice/yahoo.py (Normal file): 4

```
@@ -0,0 +1,4 @@
from pricehist import beanprice
from pricehist.sources.yahoo import Yahoo

Source = beanprice.source(Yahoo())
```
```
@@ -10,13 +10,13 @@ from pricehist.format import Format
from pricehist.series import Series


def cli(args=None, output_file=sys.stdout):
def cli(argv=sys.argv):
    start_time = datetime.now()

    logger.init()

    parser = build_parser()
    args = parser.parse_args()
    args = parser.parse_args(argv[1:])

    if args.verbose:
        logger.show_debug()

@@ -25,92 +25,105 @@ def cli(args=None, output_file=sys.stdout):

    try:
        if args.version:
            print(f"pricehist v{__version__}", file=output_file)
            print(f"pricehist {__version__}")
        elif args.command == "sources":
            result = sources.formatted()
            print(result, file=output_file)
            print(result)
        elif args.command == "source" and args.symbols:
            result = sources.by_id[args.source].format_symbols()
            print(result, file=output_file, end="")
            print(result, end="")
        elif args.command == "source" and args.search:
            result = sources.by_id[args.source].format_search(args.search)
            print(result, file=output_file, end="")
            print(result, end="")
        elif args.command == "source":
            total_width = shutil.get_terminal_size().columns
            result = sources.by_id[args.source].format_info(total_width)
            print(result, file=output_file)
            print(result)
        elif args.command == "fetch":
            source = sources.by_id[args.source]
            output = outputs.by_type[args.output]
            if args.start:
                start = args.start
            else:
                start = source.start()
                logging.info(f"Using the source default start date of {start}.")
            series = Series(
                base=args.pair[0],
                quote=args.pair[1],
                type=args.type or (source.types() + ["(none)"])[0],
                start=start,
                end=args.end,
            )
            if series.type not in source.types():
                logging.critical(
                    f"The requested price type '{series.type}' is not "
            if args.end < args.start:
                parser.error(
                    f"The end date '{args.end}' preceeds the start date '{args.start}'!"
                )
            if args.type not in source.types():
                parser.error(
                    f"The requested price type '{args.type}' is not "
                    f"recognized by the {source.id()} source!"
                )
                sys.exit(1)
            series = Series(
                base=source.normalizesymbol(args.pair[0]),
                quote=source.normalizesymbol(args.pair[1]),
                type=args.type,
                start=args.start,
                end=args.end,
            )
            fmt = Format.fromargs(args)
            result = fetch(series, source, output, args.invert, args.quantize, fmt)
            print(result, end="", file=output_file)
            print(result, end="")
        else:
            parser.print_help(file=sys.stderr)
            parser.print_help()
    except BrokenPipeError:
        logging.debug("The output pipe was closed early.")
    finally:
        logging.debug(f"Ended pricehist run at {datetime.now()}.")


def valid_pair(s):
    base, quote = (s + "/").split("/")[0:2]
    if base == "":
        msg = f"No base found in the requested pair '{s}'."
        raise argparse.ArgumentTypeError(msg)
    return (base, quote)


def valid_date(s):
    if s == "today":
        return today()
    try:
        return datetime.strptime(s, "%Y-%m-%d").date().isoformat()
    except ValueError:
        msg = f"Not a valid YYYY-MM-DD date: '{s}'."
        raise argparse.ArgumentTypeError(msg)


def valid_date_before(s):
    return (
        datetime.strptime(valid_date(s), "%Y-%m-%d").date() - timedelta(days=1)
    ).isoformat()


def valid_date_after(s):
    return (
        datetime.strptime(valid_date(s), "%Y-%m-%d").date() + timedelta(days=1)
    ).isoformat()


def valid_char(s):
    if len(s) == 1:
        return s
    else:
        msg = f"Not a single character: '{s}'."
        raise argparse.ArgumentTypeError(msg)


def today():
    return datetime.now().date().isoformat()


def build_parser():
    def valid_pair(s):
        base, quote = (s + "/").split("/")[0:2]
        if base == "":
            msg = f"No base found in the requested pair '{s}'."
            raise argparse.ArgumentTypeError(msg)
        return (base, quote)

    def valid_date(s):
        if s == "today":
            return today()
        try:
            return datetime.strptime(s, "%Y-%m-%d").date().isoformat()
        except ValueError:
            msg = f"Not a valid YYYY-MM-DD date: '{s}'."
            raise argparse.ArgumentTypeError(msg)

    def previous_valid_date(s):
        return (
            datetime.strptime(valid_date(s), "%Y-%m-%d").date() - timedelta(days=1)
        ).isoformat()

    def following_valid_date(s):
        return (
            datetime.strptime(valid_date(s), "%Y-%m-%d").date() + timedelta(days=1)
        ).isoformat()

    def today():
        return datetime.now().date().isoformat()

    def valid_char(s):
        if len(s) == 1:
            return s
        else:
            msg = f"Not a single character: '{s}'."
            raise argparse.ArgumentTypeError(msg)

    def formatter(prog):
        return argparse.HelpFormatter(prog, max_help_position=50)

    class SetSourceDefaults(argparse.Action):
        def __call__(self, parser, namespace, value, option_string=None):
            source = sources.by_id[value]
            setattr(namespace, self.dest, value)
            if getattr(namespace, "type") is None:
                setattr(namespace, "type", source.types()[0])
            if getattr(namespace, "start") is None:
                setattr(namespace, "start", source.start())

    default_fmt = Format()
    parser = argparse.ArgumentParser(
        prog="pricehist",

@@ -125,6 +138,7 @@ def build_parser():
    )

    parser.add_argument(
        "-vvv",
        "--verbose",
        action="store_true",
        help="show all log messages",

@@ -132,11 +146,17 @@ def build_parser():

    subparsers = parser.add_subparsers(title="commands", dest="command")

    subparsers.add_parser(
    sources_parser = subparsers.add_parser(
        "sources",
        help="list sources",
        formatter_class=formatter,
    )
    sources_parser.add_argument(
        "-vvv",
        "--verbose",
        action="store_true",
        help="show all log messages",
    )

    source_parser = subparsers.add_parser(
        "source",

@@ -151,6 +171,12 @@ def build_parser():
        choices=sources.by_id.keys(),
        help="the source identifier",
    )
    source_parser.add_argument(
        "-vvv",
        "--verbose",
        action="store_true",
        help="show all log messages",
    )

    source_list_or_search = source_parser.add_mutually_exclusive_group(required=False)
    source_list_or_search.add_argument(

@@ -172,14 +198,14 @@ def build_parser():
        usage=(
            # Set usage manually to have positional arguments before options
            # and show allowed values where appropriate
            "pricehist fetch SOURCE PAIR [-h] "
            "pricehist fetch SOURCE PAIR [-h] [-vvv] "
            "[-t TYPE] [-s DATE | -sx DATE] [-e DATE | -ex DATE] "
            f"[-o {'|'.join(outputs.by_type.keys())}] "
            "[--invert] [--quantize INT] "
            "[--fmt-base SYM] [--fmt-quote SYM] [--fmt-time TIME] "
            "[--fmt-decimal CHAR] [--fmt-thousands CHAR] "
            "[--fmt-symbol rightspace|right|leftspace|left] [--fmt-datesep CHAR] "
            "[--fmt-csvdelim CHAR]"
            "[--fmt-csvdelim CHAR] [--fmt-jsonnums]"
        ),
        formatter_class=formatter,
    )

@@ -188,6 +214,7 @@ def build_parser():
        metavar="SOURCE",
        type=str,
        choices=sources.by_id.keys(),
        action=SetSourceDefaults,
        help="the source identifier",
    )
    fetch_parser.add_argument(

@@ -196,13 +223,19 @@ def build_parser():
        type=valid_pair,
        help="pair, usually BASE/QUOTE, e.g. BTC/USD",
    )
    fetch_parser.add_argument(
        "-vvv",
        "--verbose",
        action="store_true",
        help="show all log messages",
    )
    fetch_parser.add_argument(
        "-t",
        "--type",
        dest="type",
        metavar="TYPE",
        type=str,
        help="price type, e.g. close",
        help="price type, e.g. close (default: first for source)",
    )
    fetch_start_group = fetch_parser.add_mutually_exclusive_group(required=False)
    fetch_start_group.add_argument(

@@ -218,7 +251,7 @@ def build_parser():
        "--startx",
        dest="start",
        metavar="DATE",
        type=following_valid_date,
        type=valid_date_after,
        help="start date, exclusive",
    )

@@ -237,7 +270,7 @@ def build_parser():
        "--endx",
        dest="end",
        metavar="DATE",
        type=previous_valid_date,
        type=valid_date_before,
        help="end date, exclusive",
    )

@@ -320,5 +353,11 @@ def build_parser():
        type=valid_char,
        help=f"field delimiter for CSV output (default: '{default_fmt.csvdelim}')",
    )
    fetch_parser.add_argument(
        "--fmt-jsonnums",
        dest="formatjsonnums",
        action="store_true",
        help=f"numbers not strings for JSON output (default: {default_fmt.jsonnums})",
    )

    return parser
```
src/pricehist/exceptions.py (Normal file): 94

```
@@ -0,0 +1,94 @@
import logging
import sys
from contextlib import contextmanager


@contextmanager
def handler():
    try:
        yield
    except SourceError as e:
        logging.debug("Critical exception encountered", exc_info=e)
        logging.critical(str(e))
        sys.exit(1)


class SourceError(Exception):
    """Base exception for errors raised by sources"""


class InvalidPair(SourceError, ValueError):
    """An invalid pair was requested."""

    def __init__(self, base, quote, source, message=None):
        self.base = base
        self.quote = quote
        self.source = source
        pair = "/".join([s for s in [base, quote] if s])
        insert = message + " " if message else ""

        full_message = (
            f"Invalid pair '{pair}'. {insert}"
            f"Run 'pricehist source {source.id()} --symbols' "
            f"for information about valid pairs."
        )
        super(InvalidPair, self).__init__(full_message)


class InvalidType(SourceError, ValueError):
    """An invalid price type was requested."""

    def __init__(self, type, base, quote, source):
        self.type = type
        self.pair = "/".join([s for s in [base, quote] if s])
        message = (
            f"Invalid price type '{type}' for pair '{self.pair}'. "
            f"Run 'pricehist source {source.id()}' "
            f"for information about valid types."
        )
        super(InvalidType, self).__init__(message)


class CredentialsError(SourceError):
    """Access credentials are unavailable or invalid."""

    def __init__(self, keys, source, msg=""):
        self.keys = keys
        self.source = source
        message = (
            f"Access credentials for source '{source.id()}' are unavailable "
            f"""or invalid. Set the environment variables '{"', '".join(keys)}' """
            f"correctly. Run 'pricehist source {source.id()}' for more "
            f"information about credentials."
        )
        if msg:
            message += f" {msg}"
        super(CredentialsError, self).__init__(message)


class RateLimit(SourceError):
    """Source request rate limit reached."""

    def __init__(self, message):
        super(RateLimit, self).__init__(f"{self.__doc__} {message}")


class RequestError(SourceError):
    """An error occured while making a request to the source."""

    def __init__(self, message):
        super(RequestError, self).__init__(f"{self.__doc__} {message}")


class BadResponse(SourceError):
    """A bad response was received from the source."""

    def __init__(self, message):
        super(BadResponse, self).__init__(f"{self.__doc__} {message}")


class ResponseParsingError(SourceError):
    """An error occurred while parsing data from the source."""

    def __init__(self, message):
        super(ResponseParsingError, self).__init__(f"{self.__doc__} {message}")
```
```
@@ -1,26 +1,38 @@
import logging
from datetime import datetime
from datetime import date, datetime, timedelta

from pricehist import exceptions


def fetch(series, source, output, invert: bool, quantize: int, fmt) -> str:
    if series.start < source.start():
        logging.warn(
        logging.warning(
            f"The start date {series.start} preceeds the {source.name()} "
            f"source start date of {source.start()}."
        )

    series = source.fetch(series)
    with exceptions.handler():
        series = source.fetch(series)

    if len(series.prices) == 0:
        logging.warn(f"No data found for the interval [{series.start}--{series.end}].")
        logging.warning(
            f"No data found for the interval [{series.start}--{series.end}]."
        )
    else:
        first = series.prices[0].date
        last = series.prices[-1].date
        if series.start < first or series.end > last:
            logging.warn(
                f"Available data covers the interval [{first}--{last}], "
                f"{_cov_description(series.start, series.end, first, last)}."
            )
        message = (
            f"Available data covers the interval [{first}--{last}], "
            f"{_cov_description(series.start, series.end, first, last)}."
        )
        if first > series.start or last < series.end:
            expected_end = _yesterday() if series.end == _today() else series.end
            if first == series.start and last == expected_end:
                logging.debug(message)  # Missing today's price is expected
            else:
                logging.warning(message)
        else:
            logging.debug(message)

    if invert:
        series = series.invert()

@@ -30,6 +42,14 @@ def fetch(series, source, output, invert: bool, quantize: int, fmt) -> str:
    return output.format(series, source, fmt=fmt)


def _today():
    return date.today().isoformat()


def _yesterday():
    return (date.today() - timedelta(days=1)).isoformat()


def _cov_description(
    requested_start: str, requested_end: str, actual_start: str, actual_end: str
) -> str:

@@ -60,5 +80,7 @@ def _cov_description(
            f"and ends {end_uncovered} day{s(end_uncovered)} earlier "
            f"than requested"
        )
    else:
    elif start_uncovered == 0 and end_uncovered == 0:
        return "as requested"
    else:
        return "which doesn't match the request"
```
```
@@ -11,6 +11,7 @@
    symbol: str = "rightspace"
    datesep: str = "-"
    csvdelim: str = ","
    jsonnums: bool = False

    @classmethod
    def fromargs(cls, args):

@@ -27,6 +28,7 @@
            symbol=if_not_none(args.formatsymbol, default.symbol),
            datesep=if_not_none(args.formatdatesep, default.datesep),
            csvdelim=if_not_none(args.formatcsvdelim, default.csvdelim),
            jsonnums=if_not_none(args.formatjsonnums, default.jsonnums),
        )

    def format_date(self, date):
```
```
@@ -1,4 +1,5 @@
""" ISO 4217 Currency data
"""
ISO 4217 Currency data

Provides `ISO 4217 <https://www.iso.org/iso-4217-currency-codes.html>`_
currency data in a ready-to-use format, indexed by currency code. Historical

@@ -7,8 +8,8 @@ currencies are included and countries with no universal currency are ignored.
The data is read from vendored copies of the XML files published by the
maintainers of the standard:

* :file:`list_one.xml` (current currencies & funds)
* :file:`list_three.xml` (historical currencies & funds)
* :file:`list-one.xml` (current currencies & funds)
* :file:`list-three.xml` (historical currencies & funds)

Classes:

@@ -23,7 +24,8 @@ Functions:
"""

from dataclasses import dataclass, field
from importlib.resources import read_binary
from importlib.resources import files
from typing import List

from lxml import etree

@@ -35,26 +37,34 @@ class ISOCurrency:
    minor_units: int = None
    name: str = None
    is_fund: bool = False
    countries: list[str] = field(default_factory=list)
    countries: List[str] = field(default_factory=list)
    historical: bool = False
    withdrawal_date: str = None


def current_data_date():
    one = etree.fromstring(read_binary("pricehist.resources", "list_one.xml"))
    one = etree.fromstring(
        files("pricehist.resources").joinpath("list-one.xml").read_bytes()
    )
    return one.cssselect("ISO_4217")[0].attrib["Pblshd"]


def historical_data_date():
    three = etree.fromstring(read_binary("pricehist.resources", "list_three.xml"))
    three = etree.fromstring(
        files("pricehist.resources").joinpath("list-three.xml").read_bytes()
    )
    return three.cssselect("ISO_4217")[0].attrib["Pblshd"]


def by_code():
    result = {}

    one = etree.fromstring(read_binary("pricehist.resources", "list_one.xml"))
    three = etree.fromstring(read_binary("pricehist.resources", "list_three.xml"))
    one = etree.fromstring(
        files("pricehist.resources").joinpath("list-one.xml").read_bytes()
    )
    three = etree.fromstring(
        files("pricehist.resources").joinpath("list-three.xml").read_bytes()
    )

    for entry in three.cssselect("HstrcCcyNtry") + one.cssselect("CcyNtry"):
        if currency := _parse(entry):

@@ -89,13 +99,11 @@ def _parse(entry):
    except (IndexError, ValueError):
        minor_units = None

    name_tags = entry.cssselect("CcyNm")
    if name_tags:
    name = None
    is_fund = None
    if name_tags := entry.cssselect("CcyNm"):
        name = name_tags[0].text
        is_fund = name_tags[0].attrib.get("IsFund", "").upper() in ["TRUE", "WAHR"]
    else:
        name = None
        is_fund = None

    countries = [t.text for t in entry.cssselect("CtryNm")]
```
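A brief usage sketch for the module above, based only on the functions and
fields visible in this hunk. The import path is assumed here, since the file's
name is not shown in this diff, and the printed values are illustrative.

```python
from pricehist import isocurrencies  # assumed module path

currencies = isocurrencies.by_code()      # dict of ISOCurrency, keyed by code
euro = currencies["EUR"]
print(euro.name, euro.minor_units)        # e.g. "Euro" and 2
print(isocurrencies.current_data_date())  # publication date from list-one.xml
```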
```
@@ -4,11 +4,18 @@ import sys

class Formatter(logging.Formatter):
    def format(self, record):
        message = record.msg % record.args if record.args else record.msg
        if record.levelno == logging.INFO:
            return message
        else:
            return f"{record.levelname} {message}"
        s = record.msg % record.args if record.args else record.msg

        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info)
            if s[-1:] != "\n":
                s = s + "\n"
            s = s + "\n".join([f" {line}" for line in record.exc_text.splitlines()])

        if record.levelno != logging.INFO:
            s = "\n".join([f"{record.levelname} {line}" for line in s.splitlines()])

        return s


def init():

@@ -16,6 +23,7 @@ def init():
    handler.setFormatter(Formatter())
    logging.root.addHandler(handler)
    logging.root.setLevel(logging.INFO)
    logging.getLogger("charset_normalizer").disabled = True


def show_debug():
```
```
@@ -1,6 +1,7 @@
from .beancount import Beancount
from .csv import CSV
from .gnucashsql import GnuCashSQL
from .json import JSON
from .ledger import Ledger

default = "csv"

@@ -8,6 +9,8 @@ default = "csv"
by_type = {
    "beancount": Beancount(),
    "csv": CSV(),
    "json": JSON(),
    "jsonl": JSON(jsonl=True),
    "gnucash-sql": GnuCashSQL(),
    "ledger": Ledger(),
}
```
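To connect this registry with the rest of the diff (the CLI looks an output up
by name and `fetch` renders a series through its `format` method), a short
sketch that reuses the library example from the README:

```python
from pricehist import outputs
from pricehist.format import Format
from pricehist.series import Series
from pricehist.sources.ecb import ECB

# Fetch a short series, then render it with one of the registered outputs.
source = ECB()
series = source.fetch(Series("EUR", "AUD", "reference", "2021-01-04", "2021-01-08"))
print(outputs.by_type["csv"].format(series, source, fmt=Format()), end="")
```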
```
@@ -8,4 +8,4 @@ from pricehist.sources.basesource import BaseSource
class BaseOutput(ABC):
    @abstractmethod
    def format(self, series: Series, source: BaseSource, fmt: Format) -> str:
        pass
        pass  # pragma: nocover
```
@@ -1,3 +1,27 @@
"""
Beancount output

Supports the `Beancount <https://beancount.github.io/>`_ plain text accounting
format.

The default output should be valid for Beancount. Customizing it via formatting
options may generate invalid output, so users should keep the requirements of
the Beancount format in mind.

Relevant sections of the Beancount documentation:

* `Commodities / Currencies
<https://beancount.github.io/docs/beancount_language_syntax.html#commodities-currencies>`_
* `Prices <https://beancount.github.io/docs/beancount_language_syntax.html#prices>`_
* `Fetching Prices in Beancount
<https://beancount.github.io/docs/fetching_prices_in_beancount.html>`_

Classes:

Beancount

"""

from pricehist.format import Format

from .baseoutput import BaseOutput

@@ -7,19 +31,9 @@ class Beancount(BaseOutput):
def format(self, series, source=None, fmt=Format()):
output = ""
for price in series.prices:
# TODO warn if fmt settings make an invalid number (not . for decimal)
# TODO warn if fmt settings make an invalid quote (not right/rightspace)
date = fmt.format_date(price.date)
base = fmt.base or series.base
quote = fmt.quote or series.quote
quote_amount = fmt.format_quote_amount(quote, price.amount)
output += f"{date} price {base} {quote_amount}\n"
return output


# NOTE: Beancount always has commodity to the right. It seems to be possible to
# skip the space, according to https://plaintextaccounting.org/quickref/#h.n4b87oz9ku6t

# https://beancount.github.io/docs/fetching_prices_in_beancount.html
# https://beancount.github.io/docs/beancount_language_syntax.html#commodities-currencies
# https://beancount.github.io/docs/beancount_language_syntax.html#comments
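To make the format concrete, a sketch of the directive the f-string above emits; the values are made up for illustration.

    date, base, quote_amount = "2021-01-04", "BTC", "31982.20 USD"
    line = f"{date} price {base} {quote_amount}\n"
    # -> "2021-01-04 price BTC 31982.20 USD\n", a Beancount price directive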
@@ -1,3 +1,20 @@
"""
CSV output

Comma Separated Values output is easily processed with other command-line tools
or imported into a spreadsheet or database.

Python's `csv <https://docs.python.org/3/library/csv.html>`_ module is used to
produce Excel-style CSV output, except with UNIX-style line endings. The field
delimiter can be set with a formatting option, and date, number and base/quote
formatting options will be respected.

Classes:

CSV

"""

import csv
import io

@@ -7,7 +24,7 @@ from .baseoutput import BaseOutput


class CSV(BaseOutput):
def format(self, series, source=None, fmt=Format()):
def format(self, series, source, fmt=Format()):
output = io.StringIO()
writer = csv.writer(
output,
@@ -1,8 +1,48 @@
"""
GnuCash SQL output

Support for the `GnuCash <https://www.gnucash.org/>`_ accounting program is
achieved by generating SQL that can later be applied to a GnuCash database.

This allows pricehist to support GnuCash with simple text output rather than by
depending on GnuCash Python bindings or direct database interaction.

The generated SQL can be run in SQLite, MariaDB/MySQL or PostgreSQL.

Rows in GnuCash's prices table must include GUIDs for the related commodities.
The generated SQL selects the relevant GUIDs by mnemonic from the commodities
table and stores them in a temporary table. Another temporary table is populated
with new price data and the two are joined to produce the new rows that are
inserted into the prices table.

Users need to ensure that the base and quote of the new prices already have
commodities with matching mnemonics in the GnuCash database. If this condition
is not met, the SQL will fail without making changes. The names of the base and
quote can be adjusted with pricehist formatting options in case the source and
GnuCash names don't already match. Other formatting options can adjust date
formatting and the time of day used.

Each row in the prices table has a GUID of its own. These are generated in
pricehist by hashing the price data, so the same GUID will always be used for a
given date, base, quote, source, type & amount. Existing GUIDs are skipped
during the final insert into the prices table, so there's no problem with
running one SQL file multiple times or running multiple SQL files with
overlapping data.

Warnings are generated when string escaping or number limit issues are detected
and it should be easy for users to avoid those issues.

Classes:

GnuCashSQL

"""

import hashlib
import logging
from datetime import datetime
from datetime import datetime, timezone
from decimal import Decimal
from importlib.resources import read_text
from importlib.resources import files

from pricehist import __version__
from pricehist.format import Format
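The docstring above says each new prices row gets a GUID derived by hashing the price data, and the module imports hashlib for that purpose. The exact field order and encoding are not shown in this hunk, so the following is only an illustrative sketch of the idea, not the project's actual scheme.

    import hashlib

    def illustrative_guid(date, base, quote, source, price_type, amount):
        # Hash the identifying fields so the same inputs always map to the same
        # 32-character hex GUID; an example of the described idea only.
        key = "|".join([date, base, quote, source, price_type, str(amount)])
        return hashlib.sha256(key.encode("utf-8")).hexdigest()[:32]

    illustrative_guid("2021-01-04", "BTC", "USD", "coindesk", "close", "31982.20")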
@@ -11,13 +51,14 @@ from .baseoutput import BaseOutput


class GnuCashSQL(BaseOutput):
def format(self, series, source=None, fmt=Format()):
def format(self, series, source, fmt=Format()):
base = fmt.base or series.base
quote = fmt.quote or series.quote
src = f"pricehist:{source.id()}"
src = source.id()

self._warn_about_backslashes(
{
"date": fmt.format_date("1970-01-01"),
"time": fmt.time,
"base": base,
"quote": quote,

@@ -70,7 +111,7 @@ class GnuCashSQL(BaseOutput):
if too_big:
# https://code.gnucash.org/docs/MAINT/group__Numeric.html
# https://code.gnucash.org/docs/MAINT/structgnc__price__s.html
logging.warn(
logging.warning(
"This SQL contains numbers outside of the int64 range required "
"by GnuCash for the numerators and denominators of prices. "
"Using the --quantize option to limit the number of decimal "

@@ -78,13 +119,18 @@ class GnuCashSQL(BaseOutput):
"well."
)

sql = read_text("pricehist.resources", "gnucash.sql").format(
version=__version__,
timestamp=datetime.utcnow().isoformat() + "Z",
base=self._sql_str(base),
quote=self._sql_str(quote),
values_comment=values_comment,
values=values,
sql = (
files("pricehist.resources")
.joinpath("gnucash.sql")
.read_text()
.format(
version=__version__,
timestamp=datetime.now(timezone.utc).isoformat()[:-6] + "Z",
base=self._sql_str(base),
quote=self._sql_str(quote),
values_comment=values_comment,
values=values,
)
)

return sql
@@ -92,10 +138,10 @@ class GnuCashSQL(BaseOutput):
def _warn_about_backslashes(self, fields):
hits = [name for name, value in fields.items() if "\\" in value]
if hits:
logging.warn(
logging.warning(
f"Before running this SQL, check the formatting of the "
f"{self._english_join(hits)} strings. "
f"SQLite treats backslahes in strings as plain characters, but "
f"SQLite treats backslashes in strings as plain characters, but "
f"MariaDB/MySQL and PostgreSQL may interpret them as escape "
f"codes."
)

@@ -109,11 +155,11 @@ class GnuCashSQL(BaseOutput):
return f"{', '.join(strings[0:-1])} and {strings[-1]}"

def _sql_str(self, s):
# Documentation regarding SQL string literals
# - https://www.sqlite.org/lang_expr.html#literal_values_constants_
# - https://mariadb.com/kb/en/string-literals/
# - https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
# - https://www.postgresql.org/docs/devel/sql-syntax-lexical.html
# Documentation regarding SQL string literals:
# * https://www.sqlite.org/lang_expr.html#literal_values_constants_
# * https://mariadb.com/kb/en/string-literals/
# * https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
# * https://www.postgresql.org/docs/devel/sql-syntax-lexical.html
escaped = s.replace("'", "''")
quoted = f"'{escaped}'"
return quoted

@@ -128,9 +174,9 @@ class GnuCashSQL(BaseOutput):
denom = str(1)
else:
numerator = sign + "".join([str(d) for d in tup.digits])
denom = str(10 ** -tup.exponent)
denom = str(10**-tup.exponent)
fit = self._fit_in_int64(Decimal(numerator), Decimal(denom))
return (numerator, denom, fit)

def _fit_in_int64(self, *numbers):
return all(n >= -(2 ** 63) and n <= (2 ** 63) - 1 for n in numbers)
return all(n >= -(2**63) and n <= (2**63) - 1 for n in numbers)
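A worked example of the rational conversion these helpers perform, based only on the lines shown in this hunk (the enclosing method is not shown, so treat this as a sketch of the idea rather than the exact code path).

    from decimal import Decimal

    amount = Decimal("31982.20")
    tup = amount.as_tuple()                          # sign=0, digits=(3,1,9,8,2,2,0), exponent=-2
    numerator = "".join(str(d) for d in tup.digits)  # "3198220"
    denom = str(10**-tup.exponent)                   # "100"
    # GnuCash stores the price as numerator/denominator, here 3198220/100,
    # and both values must fit in a signed 64-bit integer.
    fits = all(-(2**63) <= Decimal(n) <= (2**63) - 1 for n in (numerator, denom))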
src/pricehist/outputs/json.py (new file, 57 lines)
@@ -0,0 +1,57 @@
"""
JSON output

Date, number and base/quote formatting options will be respected.

Classes:

JSON

"""

import io
import json

from pricehist.format import Format

from .baseoutput import BaseOutput


class JSON(BaseOutput):
def __init__(self, jsonl=False):
self.jsonl = jsonl

def format(self, series, source, fmt=Format()):
data = []
output = io.StringIO()

base = fmt.base or series.base
quote = fmt.quote or series.quote

for price in series.prices:
date = fmt.format_date(price.date)
if fmt.jsonnums:
amount = float(price.amount)
else:
amount = fmt.format_num(price.amount)

data.append(
{
"date": date,
"base": base,
"quote": quote,
"amount": amount,
"source": source.id(),
"type": series.type,
}
)

if self.jsonl:
for row in data:
json.dump(row, output, ensure_ascii=False)
output.write("\n")
else:
json.dump(data, output, ensure_ascii=False, indent=2)
output.write("\n")

return output.getvalue()
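For reference, a sketch of the record shape the loop above builds, with illustrative values; in JSONL mode each such object is written on its own line.

    import json

    record = {
        "date": "2021-01-04",
        "base": "BTC",
        "quote": "USD",
        "amount": "31982.20",  # or a float when the jsonnums option is set
        "source": "coindesk",
        "type": "close",
    }
    print(json.dumps(record, ensure_ascii=False))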
@@ -1,3 +1,32 @@
"""
Ledger output

Supports both `Ledger <https://www.ledger-cli.org/>`_ and
`hledger <https://hledger.org/>`_ plain text accounting formats.

By default the output should be valid for Ledger, but can be customized for
hledger or other variants via formatting options. Invalid variants are
possible, so the user should be familiar with the requirements of the target
format.

Relevant sections of the Ledger manual:

* `Commodities and Currencies
<https://www.ledger-cli.org/3.0/doc/ledger3.html#Commodities-and-Currencies>`_
* `Commoditized Amounts
<https://www.ledger-cli.org/3.0/doc/ledger3.html#Commoditized-Amounts>`_

Relevant sections of the hledger manual:

* `Declaring market prices <https://hledger.org/hledger.html#declaring-market-prices>`_:
* `Declaring commodities <https://hledger.org/hledger.html#declaring-commodities>`_:

Classes:

Ledger

"""

from pricehist.format import Format

from .baseoutput import BaseOutput

@@ -11,26 +40,6 @@ class Ledger(BaseOutput):
base = fmt.base or series.base
quote = fmt.quote or series.quote
quote_amount = fmt.format_quote_amount(quote, price.amount)
output += f"P {date} {fmt.time} {base} {quote_amount}\n"
timesep = " " if fmt.time else ""
output += f"P {date}{timesep}{fmt.time} {base} {quote_amount}\n"
return output

# https://www.ledger-cli.org/3.0/doc/ledger3.html#Commodities-and-Currencies
# > The commodity may be any non-numeric string that does not contain a
# > period, comma, forward slash or at-sign. It may appear before or after
# > the amount, although it is assumed that symbols appearing before the
# > amount refer to currencies, while non-joined symbols appearing after the
# > amount refer to commodities.

# https://www.ledger-cli.org/3.0/doc/ledger3.html#Commoditized-Amounts
# > A commoditized amount is an integer amount which has an associated
# > commodity. This commodity can appear before or after the amount, and may
# > or may not be separated from it by a space. Most characters are allowed
# > in a commodity name, except for the following:
# > - Any kind of white-space
# > - Numerical digits
# > - Punctuation: .,;:?!
# > - Mathematical and logical operators: -+*/^&|=
# > - Bracketing characters: <>[](){}
# > - The at symbol: @
# > And yet, any of these may appear in a commodity name if it is
# > surrounded by double quotes
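To make the timesep change concrete, a sketch of the two P directive shapes the f-string above can produce; the values are illustrative.

    date, base, quote_amount = "2021-01-04", "BTC", "31982.20 USD"

    time = "00:00:00"
    timesep = " " if time else ""
    f"P {date}{timesep}{time} {base} {quote_amount}"
    # -> "P 2021-01-04 00:00:00 BTC 31982.20 USD"

    time = ""
    timesep = " " if time else ""
    f"P {date}{timesep}{time} {base} {quote_amount}"
    # -> "P 2021-01-04 BTC 31982.20 USD"  (no stray double space when time is empty)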
@@ -1,4 +1,4 @@
-- Created by pricehist v{version} at {timestamp}
-- Created by pricehist {version} at {timestamp}

BEGIN;

@@ -35,10 +35,10 @@ WHERE tp.base = g1.mnemonic
AND tp.guid NOT IN (SELECT guid FROM prices)
;

-- Show the summary.
SELECT * FROM summary;

-- Show the final relevant rows of the main prices table
SELECT 'final' AS status, p.* FROM prices p WHERE p.guid IN (SELECT guid FROM new_prices) ORDER BY p.date;

-- Show the summary.
SELECT * FROM summary;

COMMIT;
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<ISO_4217 Pblshd="2018-08-29">
<ISO_4217 Pblshd="2023-01-01">
<CcyTbl>
<CcyNtry>
<CtryNm>AFGHANISTAN</CtryNm>

@@ -413,9 +413,9 @@
</CcyNtry>
<CcyNtry>
<CtryNm>CROATIA</CtryNm>
<CcyNm>Kuna</CcyNm>
<Ccy>HRK</Ccy>
<CcyNbr>191</CcyNbr>
<CcyNm>Euro</CcyNm>
<Ccy>EUR</Ccy>
<CcyNbr>978</CcyNbr>
<CcyMnrUnts>2</CcyMnrUnts>
</CcyNtry>
<CcyNtry>

@@ -1493,6 +1493,13 @@
<CcyNbr>694</CcyNbr>
<CcyMnrUnts>2</CcyMnrUnts>
</CcyNtry>
<CcyNtry>
<CtryNm>SIERRA LEONE</CtryNm>
<CcyNm>Leone</CcyNm>
<Ccy>SLE</Ccy>
<CcyNbr>925</CcyNbr>
<CcyMnrUnts>2</CcyMnrUnts>
</CcyNtry>
<CcyNtry>
<CtryNm>SINGAPORE</CtryNm>
<CcyNm>Singapore Dollar</CcyNm>

@@ -1701,7 +1708,7 @@
<CcyMnrUnts>3</CcyMnrUnts>
</CcyNtry>
<CcyNtry>
<CtryNm>TURKEY</CtryNm>
<CtryNm>TÜRKİYE</CtryNm>
<CcyNm>Turkish Lira</CcyNm>
<Ccy>TRY</Ccy>
<CcyNbr>949</CcyNbr>

@@ -1819,6 +1826,13 @@
<CcyNbr>928</CcyNbr>
<CcyMnrUnts>2</CcyMnrUnts>
</CcyNtry>
<CcyNtry>
<CtryNm>VENEZUELA (BOLIVARIAN REPUBLIC OF)</CtryNm>
<CcyNm>Bolívar Soberano</CcyNm>
<Ccy>VED</Ccy>
<CcyNbr>926</CcyNbr>
<CcyMnrUnts>2</CcyMnrUnts>
</CcyNtry>
<CcyNtry>
<CtryNm>VIET NAM</CtryNm>
<CcyNm>Dong</CcyNm>
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<ISO_4217 Pblshd="2018-08-20">
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<ISO_4217 Pblshd="2023-01-01">
<HstrcCcyTbl>
<HstrcCcyNtry>
<CtryNm>AFGHANISTAN</CtryNm>

@@ -253,6 +253,13 @@
<CcyNbr>191</CcyNbr>
<WthdrwlDt>2015-06</WthdrwlDt>
</HstrcCcyNtry>
<HstrcCcyNtry>
<CtryNm>CROATIA</CtryNm>
<CcyNm>Kuna</CcyNm>
<Ccy>HRK</Ccy>
<CcyNbr>191</CcyNbr>
<WthdrwlDt>2023-01</WthdrwlDt>
</HstrcCcyNtry>
<HstrcCcyNtry>
<CtryNm>CYPRUS</CtryNm>
<CcyNm>Cyprus Pound</CcyNm>
@@ -1,5 +1,6 @@
from dataclasses import dataclass, field, replace
from decimal import Decimal, getcontext
from typing import List

from pricehist.price import Price

@@ -11,7 +12,7 @@ class Series:
type: str
start: str
end: str
prices: list[Price] = field(default_factory=list)
prices: List[Price] = field(default_factory=list)

def invert(self):
return replace(
@@ -1,4 +1,6 @@
from .alphavantage import AlphaVantage
from .bankofcanada import BankOfCanada
from .coinbasepro import CoinbasePro
from .coindesk import CoinDesk
from .coinmarketcap import CoinMarketCap
from .ecb import ECB

@@ -6,7 +8,15 @@ from .yahoo import Yahoo

by_id = {
source.id(): source
for source in [AlphaVantage(), CoinDesk(), CoinMarketCap(), ECB(), Yahoo()]
for source in [
AlphaVantage(),
BankOfCanada(),
CoinbasePro(),
CoinDesk(),
CoinMarketCap(),
ECB(),
Yahoo(),
]
}
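A short usage sketch of the source registry above, assuming the package path is pricehist.sources; the id and name values come from the BankOfCanada source added later in this diff, and the Series construction is left out as a stand-in.

    from pricehist import sources  # assumed package path for the registry above

    source = sources.by_id["bankofcanada"]
    print(source.name())            # "Bank of Canada"
    # series = source.fetch(series)  # fetch() returns the Series with prices filled in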
@ -1,13 +1,15 @@
|
|||
import csv
|
||||
import dataclasses
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from typing import List, Tuple
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import __version__, exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
@ -15,6 +17,7 @@ from .basesource import BaseSource
|
|||
|
||||
class AlphaVantage(BaseSource):
|
||||
QUERY_URL = "https://www.alphavantage.co/query"
|
||||
API_KEY_NAME = "ALPHAVANTAGE_API_KEY"
|
||||
|
||||
def id(self):
|
||||
return "alphavantage"
|
||||
|
@ -35,24 +38,26 @@ class AlphaVantage(BaseSource):
|
|||
return ["close", "open", "high", "low", "adjclose", "mid"]
|
||||
|
||||
def notes(self):
|
||||
keystatus = "already set" if self._apikey(require=False) else "NOT YET set"
|
||||
keystatus = "already set" if self._apikey(require=False) else "not yet set"
|
||||
return (
|
||||
"Alpha Vantage has data on digital (crypto) currencies, physical "
|
||||
"(fiat) currencies and stocks.\n"
|
||||
"An API key is required. One can be obtained for free from "
|
||||
"https://www.alphavantage.co/support/#api-key and should be made "
|
||||
"available in the ALPHAVANTAGE_API_KEY environment variable "
|
||||
f"({keystatus}).\n"
|
||||
"You should obtain a free API key from "
|
||||
"https://www.alphavantage.co/support/#api-key and set it in "
|
||||
f"the {self.API_KEY_NAME} environment variable ({keystatus}), "
|
||||
"otherwise, pricehist will attempt to use a generic key.\n"
|
||||
"The PAIR for currencies should be in BASE/QUOTE form. The quote "
|
||||
"symbol must always be for a physical currency. The --symbols option "
|
||||
"will list all digital and physical currency symbols.\n"
|
||||
"The PAIR for stocks is the stock symbol only. The quote currency "
|
||||
f"will be determined automatically. {self._stock_symbols_message()}\n"
|
||||
"The price type 'adjclose' is only available for stocks.\n"
|
||||
"Alpha Vantage's standard API call frequency limits is 5 calls per "
|
||||
"minute and 500 per day, so you may need to pause between successive "
|
||||
"commands. Note that retrieving prices for one stock requires two "
|
||||
"calls."
|
||||
"The price type 'adjclose' is only available for stocks, and "
|
||||
"requires an access key for which premium endpoints are unlocked.\n"
|
||||
"Beware that digital currencies quoted in non-USD currencies may "
|
||||
"be converted from USD data at one recent exchange rate rather "
|
||||
"than using historical rates.\n"
|
||||
"Alpha Vantage's standard API rate limit is 25 requests per day. "
|
||||
"Note that retrieving prices for one stock consumes two API calls."
|
||||
)
|
||||
|
||||
def _stock_symbols_message(self):
|
||||
|
@ -85,20 +90,38 @@ class AlphaVantage(BaseSource):
|
|||
output_quote = series.quote
|
||||
|
||||
if series.quote == "":
|
||||
output_quote = self._stock_currency(output_base)
|
||||
data = self._stock_data(series)
|
||||
output_quote, data = self._stock_data(series)
|
||||
else:
|
||||
if series.type == "adjclose":
|
||||
logging.critical(
|
||||
"The 'adjclose' price type is only available for stocks. "
|
||||
"Use 'close' instead."
|
||||
raise exceptions.InvalidType(
|
||||
series.type, series.base, series.quote, self
|
||||
)
|
||||
exit(1)
|
||||
elif series.base in [s for s, n in self._physical_symbols()]:
|
||||
|
||||
physical_symbols = [s for s, n in self._physical_symbols()]
|
||||
|
||||
if series.quote not in physical_symbols:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base,
|
||||
series.quote,
|
||||
self,
|
||||
"When given, the quote must be a physical currency.",
|
||||
)
|
||||
|
||||
if series.base in physical_symbols:
|
||||
data = self._physical_data(series)
|
||||
else:
|
||||
|
||||
elif series.base in [s for s, n in self._digital_symbols()]:
|
||||
data = self._digital_data(series)
|
||||
|
||||
else:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base,
|
||||
series.quote,
|
||||
self,
|
||||
"When a quote currency is given, the base must be a known "
|
||||
"physical or digital currency.",
|
||||
)
|
||||
|
||||
prices = [
|
||||
Price(day, amount)
|
||||
for day, entries in data.items()
|
||||
|
@ -112,7 +135,7 @@ class AlphaVantage(BaseSource):
|
|||
def _amount(self, day, entries, series):
|
||||
if day < series.start or day > series.end:
|
||||
return None
|
||||
elif type == "mid":
|
||||
elif series.type == "mid":
|
||||
return sum([Decimal(entries["high"]), Decimal(entries["low"])]) / 2
|
||||
else:
|
||||
return Decimal(entries[series.type])
|
||||
|
@ -122,7 +145,7 @@ class AlphaVantage(BaseSource):
|
|||
for match in data["bestMatches"]:
|
||||
if match["1. symbol"] == symbol:
|
||||
return match["8. currency"]
|
||||
return "Unknown"
|
||||
return None
|
||||
|
||||
def _search_data(self, keywords: str):
|
||||
params = {
|
||||
|
@ -130,30 +153,91 @@ class AlphaVantage(BaseSource):
|
|||
"keywords": keywords,
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
data = json.loads(response.content)
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
self._raise_for_generic_errors(data)
|
||||
|
||||
expected_keys = ["1. symbol", "2. name", "3. type", "4. region", "8. currency"]
|
||||
if (
|
||||
type(data) is not dict
|
||||
or "bestMatches" not in data
|
||||
or type(data["bestMatches"]) is not list
|
||||
or not all(k in m for k in expected_keys for m in data["bestMatches"])
|
||||
):
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
return data
|
||||
|
||||
def _stock_data(self, series):
|
||||
output_quote = self._stock_currency(series.base) or "UNKNOWN"
|
||||
|
||||
if series.type == "adjclose":
|
||||
function = "TIME_SERIES_DAILY_ADJUSTED"
|
||||
else:
|
||||
function = "TIME_SERIES_DAILY"
|
||||
|
||||
params = {
|
||||
"function": "TIME_SERIES_DAILY_ADJUSTED",
|
||||
"function": function,
|
||||
"symbol": series.base,
|
||||
"outputsize": self._outputsize(series.start),
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
data = json.loads(response.content)
|
||||
normalized_data = {
|
||||
day: {
|
||||
"open": entries["1. open"],
|
||||
"high": entries["2. high"],
|
||||
"low": entries["3. low"],
|
||||
"close": entries["4. close"],
|
||||
"adjclose": entries["5. adjusted close"],
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
self._raise_for_generic_errors(data)
|
||||
|
||||
if "Error Message" in data:
|
||||
if output_quote == "UNKNOWN":
|
||||
raise exceptions.InvalidPair(
|
||||
series.base, series.quote, self, "Unknown stock symbol."
|
||||
)
|
||||
else:
|
||||
raise exceptions.BadResponse(data["Error Message"])
|
||||
|
||||
try:
|
||||
normalized_data = {
|
||||
day: {
|
||||
"open": entries["1. open"],
|
||||
"high": entries["2. high"],
|
||||
"low": entries["3. low"],
|
||||
"close": entries["4. close"],
|
||||
"adjclose": "5. adjusted close" in entries
|
||||
and entries["5. adjusted close"],
|
||||
}
|
||||
for day, entries in reversed(data["Time Series (Daily)"].items())
|
||||
}
|
||||
for day, entries in reversed(data["Time Series (Daily)"].items())
|
||||
}
|
||||
return normalized_data
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.") from e
|
||||
|
||||
return output_quote, normalized_data
|
||||
|
||||
def _physical_data(self, series):
|
||||
params = {
|
||||
|
@ -163,8 +247,27 @@ class AlphaVantage(BaseSource):
|
|||
"outputsize": self._outputsize(series.start),
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
data = json.loads(response.content)
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
self._raise_for_generic_errors(data)
|
||||
|
||||
if type(data) is not dict or "Time Series FX (Daily)" not in data:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
normalized_data = {
|
||||
day: {k[3:]: v for k, v in entries.items()}
|
||||
for day, entries in reversed(data["Time Series FX (Daily)"].items())
|
||||
|
@ -185,14 +288,33 @@ class AlphaVantage(BaseSource):
|
|||
"market": series.quote,
|
||||
"apikey": self._apikey(),
|
||||
}
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
data = json.loads(response.content)
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(self.QUERY_URL, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
self._raise_for_generic_errors(data)
|
||||
|
||||
if type(data) is not dict or "Time Series (Digital Currency Daily)" not in data:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
normalized_data = {
|
||||
day: {
|
||||
"open": entries[f"1a. open ({series.quote})"],
|
||||
"high": entries[f"2a. high ({series.quote})"],
|
||||
"low": entries[f"3a. low ({series.quote})"],
|
||||
"close": entries[f"4a. close ({series.quote})"],
|
||||
"open": entries["1. open"],
|
||||
"high": entries["2. high"],
|
||||
"low": entries["3. low"],
|
||||
"close": entries["4. close"],
|
||||
}
|
||||
for day, entries in reversed(
|
||||
data["Time Series (Digital Currency Daily)"].items()
|
||||
|
@ -201,27 +323,57 @@ class AlphaVantage(BaseSource):
|
|||
return normalized_data
|
||||
|
||||
def _apikey(self, require=True):
|
||||
key_name = "ALPHAVANTAGE_API_KEY"
|
||||
key = os.getenv(key_name)
|
||||
key = os.getenv(self.API_KEY_NAME)
|
||||
if require and not key:
|
||||
logging.critical(
|
||||
f"The environment variable {key_name} is empty. "
|
||||
"Get a free API key from https://www.alphavantage.co/support/#api-key, "
|
||||
f'export {key_name}="YOUR_OWN_API_KEY" and retry.'
|
||||
generic_key = f"pricehist_{__version__}"
|
||||
logging.debug(
|
||||
f"{self.API_KEY_NAME} not set. "
|
||||
f"Defaulting to generic key '{generic_key}'."
|
||||
)
|
||||
exit(1)
|
||||
return generic_key
|
||||
return key
|
||||
|
||||
def _physical_symbols(self) -> list[(str, str)]:
|
||||
url = "https://www.alphavantage.co/physical_currency_list/"
|
||||
response = self.log_curl(requests.get(url))
|
||||
lines = response.content.decode("utf-8").splitlines()
|
||||
data = csv.reader(lines[1:], delimiter=",")
|
||||
return [(s, f"Physical: {n}") for s, n in data]
|
||||
def _raise_for_generic_errors(self, data):
|
||||
if type(data) is dict:
|
||||
if "Information" in data and "daily rate limits" in data["Information"]:
|
||||
raise exceptions.RateLimit(data["Information"])
|
||||
if (
|
||||
"Information" in data
|
||||
and "unlock" in data["Information"]
|
||||
and "premium" in data["Information"]
|
||||
):
|
||||
msg = "You were denied access to a premium endpoint."
|
||||
raise exceptions.CredentialsError([self.API_KEY_NAME], self, msg)
|
||||
if "Error Message" in data and "apikey " in data["Error Message"]:
|
||||
raise exceptions.CredentialsError([self.API_KEY_NAME], self)
|
||||
|
||||
def _digital_symbols(self) -> list[(str, str)]:
|
||||
def _physical_symbols(self) -> List[Tuple[str, str]]:
|
||||
url = "https://www.alphavantage.co/physical_currency_list/"
|
||||
return self._get_symbols(url, "Physical: ")
|
||||
|
||||
def _digital_symbols(self) -> List[Tuple[str, str]]:
|
||||
url = "https://www.alphavantage.co/digital_currency_list/"
|
||||
response = self.log_curl(requests.get(url))
|
||||
lines = response.content.decode("utf-8").splitlines()
|
||||
data = csv.reader(lines[1:], delimiter=",")
|
||||
return [(s, f"Digital: {n}") for s, n in data]
|
||||
return self._get_symbols(url, "Digital: ")
|
||||
|
||||
def _get_symbols(self, url, prefix) -> List[Tuple[str, str]]:
|
||||
try:
|
||||
response = self.log_curl(requests.get(url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
lines = response.content.decode("utf-8").splitlines()
|
||||
data = csv.reader(lines[1:], delimiter=",")
|
||||
results = [(s, f"{prefix}{n}") for s, n in data]
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if len(results) == 0:
|
||||
raise exceptions.ResponseParsingError("Symbols data missing.")
|
||||
|
||||
return results
|
||||
|
|
src/pricehist/sources/bankofcanada.py (new file, 118 lines)
@ -0,0 +1,118 @@
|
|||
import dataclasses
|
||||
import json
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
||||
|
||||
class BankOfCanada(BaseSource):
|
||||
def id(self):
|
||||
return "bankofcanada"
|
||||
|
||||
def name(self):
|
||||
return "Bank of Canada"
|
||||
|
||||
def description(self):
|
||||
return "Daily exchange rates of the Canadian dollar from the Bank of Canada"
|
||||
|
||||
def source_url(self):
|
||||
return "https://www.bankofcanada.ca/valet/docs"
|
||||
|
||||
def start(self):
|
||||
return "2017-01-03"
|
||||
|
||||
def types(self):
|
||||
return ["default"]
|
||||
|
||||
def notes(self):
|
||||
return (
|
||||
"Currently, only daily exchange rates are supported. They are "
|
||||
"published once each business day by 16:30 ET. "
|
||||
"All Bank of Canada exchange rates are indicative rates only.\n"
|
||||
"To request support for other data provided by the "
|
||||
"Bank of Canada Valet Web Services, please open an "
|
||||
"issue in pricehist's Gitlab project. "
|
||||
)
|
||||
|
||||
def symbols(self):
|
||||
url = "https://www.bankofcanada.ca/valet/lists/series/json"
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
series_names = data["series"].keys()
|
||||
fx_series_names = [
|
||||
n for n in series_names if len(n) == 8 and n[0:2] == "FX"
|
||||
]
|
||||
results = [
|
||||
(f"{n[2:5]}/{n[5:9]}", data["series"][n]["description"])
|
||||
for n in sorted(fx_series_names)
|
||||
]
|
||||
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if not results:
|
||||
raise exceptions.ResponseParsingError("Expected data not found")
|
||||
else:
|
||||
return results
|
||||
|
||||
def fetch(self, series):
|
||||
if len(series.base) != 3 or len(series.quote) != 3:
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
|
||||
series_name = f"FX{series.base}{series.quote}"
|
||||
data = self._data(series, series_name)
|
||||
|
||||
prices = []
|
||||
for o in data.get("observations", []):
|
||||
prices.append(Price(o["d"], Decimal(o[series_name]["v"])))
|
||||
|
||||
return dataclasses.replace(series, prices=prices)
|
||||
|
||||
def _data(self, series, series_name):
|
||||
url = f"https://www.bankofcanada.ca/valet/observations/{series_name}/json"
|
||||
params = {
|
||||
"start_date": series.start,
|
||||
"end_date": series.end,
|
||||
"order_dir": "asc",
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
code = response.status_code
|
||||
text = response.text
|
||||
|
||||
try:
|
||||
result = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if code == 404 and "not found" in text:
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
elif code == 400 and "End date must be greater than the Start date" in text:
|
||||
raise exceptions.BadResponse(result["message"])
|
||||
else:
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
return result
|
|
@ -1,67 +1,80 @@
|
|||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from textwrap import TextWrapper
|
||||
from typing import List, Tuple
|
||||
|
||||
import curlify
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
class BaseSource(ABC):
|
||||
@abstractmethod
|
||||
def id(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def name(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def description(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def source_url(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def start(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def types(self) -> list[str]:
|
||||
pass
|
||||
def types(self) -> List[str]:
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def notes(self) -> str:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
def normalizesymbol(self, str) -> str:
|
||||
return str.upper()
|
||||
|
||||
@abstractmethod
|
||||
def symbols(self) -> list[(str, str)]:
|
||||
pass
|
||||
def symbols(self) -> List[Tuple[str, str]]:
|
||||
pass # pragma: nocover
|
||||
|
||||
def search(self, query) -> list[(str, str)]:
|
||||
pass
|
||||
def search(self, query) -> List[Tuple[str, str]]:
|
||||
pass # pragma: nocover
|
||||
|
||||
@abstractmethod
|
||||
def fetch(self, series: Series) -> Series:
|
||||
pass
|
||||
pass # pragma: nocover
|
||||
|
||||
def log_curl(self, response):
|
||||
curl = curlify.to_curl(response.request, compressed=True)
|
||||
logging.debug(f"Request to {self.id()}: {curl}")
|
||||
logging.debug(curl)
|
||||
return response
|
||||
|
||||
def format_symbols(self) -> str:
|
||||
symbols = self.symbols()
|
||||
with exceptions.handler():
|
||||
symbols = self.symbols()
|
||||
|
||||
width = max([len(sym) for sym, desc in symbols] + [0])
|
||||
lines = [sym.ljust(width + 4) + desc + "\n" for sym, desc in symbols]
|
||||
return "".join(lines)
|
||||
|
||||
def format_search(self, query) -> str:
|
||||
if (symbols := self.search(query)) is None:
|
||||
with exceptions.handler():
|
||||
symbols = self.search(query)
|
||||
|
||||
if symbols is None:
|
||||
logging.error(f"Symbol search is not possible for the {self.id()} source.")
|
||||
exit(1)
|
||||
elif symbols == []:
|
||||
logging.info(f"No results found for query '{query}'.")
|
||||
return ""
|
||||
else:
|
||||
width = max([len(sym) for sym, desc in symbols] + [0])
|
||||
lines = [sym.ljust(width + 4) + desc + "\n" for sym, desc in symbols]
|
||||
|
@ -69,15 +82,18 @@ class BaseSource(ABC):
|
|||
|
||||
def format_info(self, total_width=80) -> str:
|
||||
k_width = 11
|
||||
parts = [
|
||||
self._fmt_field("ID", self.id(), k_width, total_width),
|
||||
self._fmt_field("Name", self.name(), k_width, total_width),
|
||||
self._fmt_field("Description", self.description(), k_width, total_width),
|
||||
self._fmt_field("URL", self.source_url(), k_width, total_width, False),
|
||||
self._fmt_field("Start", self.start(), k_width, total_width),
|
||||
self._fmt_field("Types", ", ".join(self.types()), k_width, total_width),
|
||||
self._fmt_field("Notes", self.notes(), k_width, total_width),
|
||||
]
|
||||
with exceptions.handler():
|
||||
parts = [
|
||||
self._fmt_field("ID", self.id(), k_width, total_width),
|
||||
self._fmt_field("Name", self.name(), k_width, total_width),
|
||||
self._fmt_field(
|
||||
"Description", self.description(), k_width, total_width
|
||||
),
|
||||
self._fmt_field("URL", self.source_url(), k_width, total_width, False),
|
||||
self._fmt_field("Start", self.start(), k_width, total_width),
|
||||
self._fmt_field("Types", ", ".join(self.types()), k_width, total_width),
|
||||
self._fmt_field("Notes", self.notes(), k_width, total_width),
|
||||
]
|
||||
return "\n".join(filter(None, parts))
|
||||
|
||||
def _fmt_field(self, key, value, key_width, total_width, force=True):
|
||||
|
|
src/pricehist/sources/coinbasepro.py (new file, 164 lines)
@ -0,0 +1,164 @@
|
|||
import dataclasses
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
||||
|
||||
class CoinbasePro(BaseSource):
|
||||
def id(self):
|
||||
return "coinbasepro"
|
||||
|
||||
def name(self):
|
||||
return "Coinbase Pro"
|
||||
|
||||
def description(self):
|
||||
return "The Coinbase Pro feed API provides market data to the public."
|
||||
|
||||
def source_url(self):
|
||||
return "https://docs.pro.coinbase.com/"
|
||||
|
||||
def start(self):
|
||||
return "2015-07-20"
|
||||
|
||||
def types(self):
|
||||
return ["mid", "open", "high", "low", "close"]
|
||||
|
||||
def notes(self):
|
||||
return (
|
||||
"This source uses Coinbase's Pro APIs, not the v2 API.\n"
|
||||
"No key or other authentication is requried because it only uses "
|
||||
"the feed APIs that provide market data and are public."
|
||||
)
|
||||
|
||||
def symbols(self):
|
||||
products_url = "https://api.pro.coinbase.com/products"
|
||||
currencies_url = "https://api.pro.coinbase.com/currencies"
|
||||
|
||||
try:
|
||||
products_response = self.log_curl(requests.get(products_url))
|
||||
currencies_response = self.log_curl(requests.get(currencies_url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
products_response.raise_for_status()
|
||||
currencies_response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
products_data = json.loads(products_response.content)
|
||||
currencies_data = json.loads(currencies_response.content)
|
||||
currencies = {c["id"]: c for c in currencies_data}
|
||||
|
||||
results = []
|
||||
for i in sorted(products_data, key=lambda i: i["id"]):
|
||||
base = i["base_currency"]
|
||||
quote = i["quote_currency"]
|
||||
base_name = currencies[base]["name"] if currencies[base] else base
|
||||
quote_name = currencies[quote]["name"] if currencies[quote] else quote
|
||||
results.append((f"{base}/{quote}", f"{base_name} against {quote_name}"))
|
||||
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if not results:
|
||||
raise exceptions.ResponseParsingError("Expected data not found")
|
||||
else:
|
||||
return results
|
||||
|
||||
def fetch(self, series):
|
||||
data = []
|
||||
for seg_start, seg_end in self._segments(series.start, series.end):
|
||||
data.extend(self._data(series.base, series.quote, seg_start, seg_end))
|
||||
|
||||
prices = []
|
||||
for item in data:
|
||||
prices.append(Price(item["date"], self._amount(item, series.type)))
|
||||
|
||||
return dataclasses.replace(series, prices=prices)
|
||||
|
||||
def _segments(self, start, end, length=290):
start = datetime.fromisoformat(start).date()
end = max(datetime.fromisoformat(end).date(), start)

segments = []
seg_start = start
while seg_start <= end:
seg_end = min(seg_start + timedelta(days=length - 1), end)
segments.append((seg_start.isoformat(), seg_end.isoformat()))
seg_start = seg_end + timedelta(days=1)

return segments
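A worked example of the segmentation above: the requested range is split into windows of at most 290 days (the method's default), which appears intended to keep each candles request within the endpoint's response limit. The dates here are illustrative.

    from datetime import date, timedelta

    start, end, length = date(2020, 1, 1), date(2021, 6, 30), 290
    segments, seg_start = [], start
    while seg_start <= end:
        seg_end = min(seg_start + timedelta(days=length - 1), end)
        segments.append((seg_start.isoformat(), seg_end.isoformat()))
        seg_start = seg_end + timedelta(days=1)
    # -> [("2020-01-01", "2020-10-16"), ("2020-10-17", "2021-06-30")]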
|
||||
def _data(self, base, quote, start, end):
|
||||
product = f"{base}-{quote}"
|
||||
url = f"https://api.pro.coinbase.com/products/{product}/candles"
|
||||
params = {
|
||||
"start": start,
|
||||
"end": end,
|
||||
"granularity": "86400",
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
code = response.status_code
|
||||
text = response.text
|
||||
if code == 400 and "aggregations requested exceeds" in text:
|
||||
raise exceptions.BadResponse("Too many data points requested.")
|
||||
elif code == 400 and "start must be before end" in text:
|
||||
raise exceptions.BadResponse("The end can't preceed the start.")
|
||||
elif code == 400 and "is too old" in text:
|
||||
raise exceptions.BadResponse("The requested interval is too early.")
|
||||
elif code == 404 and "NotFound" in text:
|
||||
raise exceptions.InvalidPair(base, quote, self)
|
||||
elif code == 429:
|
||||
raise exceptions.RateLimit(
|
||||
"The rate limit has been exceeded. For more information see "
|
||||
"https://docs.pro.coinbase.com/#rate-limit."
|
||||
)
|
||||
else:
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
result = reversed(
|
||||
[
|
||||
{
|
||||
"date": self._ts_to_date(candle[0]),
|
||||
"low": candle[1],
|
||||
"high": candle[2],
|
||||
"open": candle[3],
|
||||
"close": candle[4],
|
||||
}
|
||||
for candle in json.loads(response.content)
|
||||
if start <= self._ts_to_date(candle[0]) <= end
|
||||
]
|
||||
)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
return result
|
||||
|
||||
def _ts_to_date(self, ts):
|
||||
return datetime.fromtimestamp(ts, tz=timezone.utc).date().isoformat()
|
||||
|
||||
def _amount(self, item, type):
|
||||
if type in ["mid"]:
|
||||
high = Decimal(str(item["high"]))
|
||||
low = Decimal(str(item["low"]))
|
||||
return sum([high, low]) / 2
|
||||
else:
|
||||
return Decimal(str(item[type]))
|
|
@ -1,9 +1,11 @@
|
|||
import dataclasses
|
||||
import json
|
||||
import logging
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
@ -18,7 +20,9 @@ class CoinDesk(BaseSource):
|
|||
|
||||
def description(self):
|
||||
return (
|
||||
"An average of Bitcoin prices across leading global exchanges. \n"
|
||||
"WARNING: This source is deprecated. Data stops at 2022-07-10.\n"
|
||||
"The documentation URL now redirects to the main page.\n"
|
||||
"An average of Bitcoin prices across leading global exchanges.\n"
|
||||
"Powered by CoinDesk, https://www.coindesk.com/price/bitcoin"
|
||||
)
|
||||
|
||||
|
@ -36,19 +40,47 @@ class CoinDesk(BaseSource):
|
|||
|
||||
def symbols(self):
|
||||
url = "https://api.coindesk.com/v1/bpi/supported-currencies.json"
|
||||
response = self.log_curl(requests.get(url))
|
||||
data = json.loads(response.content)
|
||||
relevant = [i for i in data if i["currency"] not in ["XBT", "BTC"]]
|
||||
return [
|
||||
(f"BTC/{i['currency']}", f"Bitcoin against {i['country']}")
|
||||
for i in sorted(relevant, key=lambda i: i["currency"])
|
||||
]
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
relevant = [i for i in data if i["currency"] not in ["BTC", "XBT"]]
|
||||
results = [
|
||||
(f"BTC/{i['currency']}", f"Bitcoin against {i['country']}")
|
||||
for i in sorted(relevant, key=lambda i: i["currency"])
|
||||
]
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if not results:
|
||||
raise exceptions.ResponseParsingError("Expected data not found")
|
||||
else:
|
||||
return results
|
||||
|
||||
def fetch(self, series):
|
||||
logging.warning("This source is deprecated. Data stops at 2022-07-10.")
|
||||
|
||||
if series.base != "BTC" or series.quote in ["BTC", "XBT"]:
|
||||
# BTC is the only valid base.
|
||||
# BTC as the quote will return BTC/USD, which we don't want.
|
||||
# XBT as the quote will fail with HTTP status 500.
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
|
||||
data = self._data(series)
|
||||
|
||||
prices = []
|
||||
for (d, v) in data["bpi"].items():
|
||||
for (d, v) in data.get("bpi", {}).items():
|
||||
prices.append(Price(d, Decimal(str(v))))
|
||||
|
||||
return dataclasses.replace(series, prices=prices)
|
||||
|
||||
def _data(self, series):
|
||||
|
@ -58,5 +90,37 @@ class CoinDesk(BaseSource):
|
|||
"start": series.start,
|
||||
"end": series.end,
|
||||
}
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
return json.loads(response.content)
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
code = response.status_code
|
||||
text = response.text
|
||||
if code == 404 and "currency was not found" in text:
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
elif code == 404 and "only covers data from" in text:
|
||||
raise exceptions.BadResponse(text)
|
||||
elif code == 404 and "end date is before" in text and series.end < series.start:
|
||||
raise exceptions.BadResponse("End date is before start date.")
|
||||
elif code == 404 and "end date is before" in text:
|
||||
raise exceptions.BadResponse("The start date must be in the past.")
|
||||
elif code == 500 and "No results returned from database" in text:
|
||||
raise exceptions.BadResponse(
|
||||
"No results returned from database. This can happen when data "
|
||||
"for a valid quote currency (e.g. CUP) doesn't go all the way "
|
||||
"back to the start date, and potentially for other reasons."
|
||||
)
|
||||
else:
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
result = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
return result
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
import dataclasses
|
||||
import json
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from functools import lru_cache
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
@ -31,13 +33,16 @@ class CoinMarketCap(BaseSource):
|
|||
|
||||
def notes(self):
|
||||
return (
|
||||
"This source makes unoffical use of endpoints that power CoinMarketCap's "
|
||||
"public web interface. The price data comes from a public equivalent of "
|
||||
"the OHLCV Historical endpoint found in CoinMarketCap's official API.\n"
|
||||
"CoinMarketCap currency symbols are not necessarily unique, so it "
|
||||
"is recommended that you use IDs, which can be listed via the "
|
||||
"--symbols option. For example, 'ETH/BTC' is 'id=1027/id=1'. The "
|
||||
"corresponding symbols will be used in output."
|
||||
"This source makes unoffical use of endpoints that power "
|
||||
"CoinMarketCap's public web interface.\n"
|
||||
"CoinMarketCap currency symbols are not necessarily unique. "
|
||||
"Each symbol you give will be coverted an ID by checking fiat and "
|
||||
"metals first, then crypto by CoinMarketCap rank. "
|
||||
"The symbol data is hard-coded for fiat and metals, but fetched "
|
||||
"live for crypto.\n"
|
||||
"You can directly use IDs, which can be listed via the --symbols "
|
||||
"option. For example, 'ETH/BTC' is 'id=1027/id=1'. "
|
||||
"The corresponding symbols will be used in output, when available."
|
||||
)
|
||||
|
||||
def symbols(self):
|
||||
|
@ -47,70 +52,276 @@ class CoinMarketCap(BaseSource):
|
|||
return list(zip(ids, descriptions))
|
||||
|
||||
def fetch(self, series):
|
||||
if series.base == "ID=" or not series.quote or series.quote == "ID=":
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
|
||||
data = self._data(series)
|
||||
|
||||
prices = []
|
||||
for item in data["data"]["quotes"]:
|
||||
d = item["time_open"][0:10]
|
||||
amount = self._amount(next(iter(item["quote"].values())), series.type)
|
||||
prices.append(Price(d, amount))
|
||||
for item in data.get("quotes", []):
|
||||
d = item["timeOpen"][0:10]
|
||||
amount = self._amount(item["quote"], series.type)
|
||||
if amount is not None:
|
||||
prices.append(Price(d, amount))
|
||||
|
||||
output_base, output_quote = self._output_pair(series.base, series.quote)
|
||||
output_base, output_quote = self._output_pair(series.base, series.quote, data)
|
||||
|
||||
return dataclasses.replace(
|
||||
series, base=output_base, quote=output_quote, prices=prices
|
||||
)
|
||||
|
||||
def _data(self, series):
|
||||
url = "https://web-api.coinmarketcap.com/v1/cryptocurrency/ohlcv/historical"
|
||||
url = "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical"
|
||||
|
||||
params = {}
|
||||
|
||||
if series.base.startswith("id="):
|
||||
if series.base.startswith("ID="):
|
||||
params["id"] = series.base[3:]
|
||||
else:
|
||||
params["symbol"] = series.base
|
||||
params["id"] = self._id_from_symbol(series.base, series)
|
||||
|
||||
if series.quote.startswith("id="):
|
||||
params["convert_id"] = series.quote[3:]
|
||||
if series.quote.startswith("ID="):
|
||||
params["convertId"] = series.quote[3:]
|
||||
else:
|
||||
params["convert"] = series.quote
|
||||
params["convertId"] = self._id_from_symbol(series.quote, series)
|
||||
|
||||
params["time_start"] = int(
|
||||
int(datetime.strptime(series.start, "%Y-%m-%d").timestamp())
|
||||
params["timeStart"] = int(
|
||||
int(
|
||||
datetime.strptime(series.start, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
)
|
||||
- 24 * 60 * 60
|
||||
# Start one period earlier since the start is exclusive.
|
||||
)
|
||||
params["time_end"] = (
|
||||
int(datetime.strptime(series.end, "%Y-%m-%d").timestamp()) + 24 * 60 * 60
|
||||
) # round up to include the last day
|
||||
params["timeEnd"] = int(
|
||||
datetime.strptime(series.end, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
) # Don't round up since it's inclusive of the period covering the end time.
|
||||
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
params["interval"] = "daily"
|
||||
|
||||
return json.loads(response.content)
|
||||
try:
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
code = response.status_code
|
||||
text = response.text
|
||||
|
||||
if code == 400 and "No items found." in text:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base, series.quote, self, "Bad base ID."
|
||||
)
|
||||
|
||||
elif code == 400 and 'Invalid value for \\"convert_id\\"' in text:
|
||||
raise exceptions.InvalidPair(
|
||||
series.base, series.quote, self, "Bad quote ID."
|
||||
)
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
parsed = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if (
|
||||
"status" in parsed
|
||||
and "error_code" in parsed["status"]
|
||||
and parsed["status"]["error_code"] == "500"
|
||||
and "The system is busy" in parsed["status"]["error_message"]
|
||||
):
|
||||
raise exceptions.BadResponse(
|
||||
"The server indicated a general error. "
|
||||
"There may be problem with your request."
|
||||
)
|
||||
|
||||
if type(parsed) is not dict or "data" not in parsed:
|
||||
raise exceptions.ResponseParsingError("Unexpected content.")
|
||||
|
||||
elif len(parsed["data"]) == 0:
|
||||
raise exceptions.ResponseParsingError(
|
||||
"The data section was empty. This can happen when the quote "
|
||||
"currency symbol can't be found, and potentially for other reasons."
|
||||
)
|
||||
|
||||
return parsed["data"]
|
||||
|
||||
def _amount(self, data, type):
|
||||
if type in ["mid"]:
|
||||
if type in ["mid"] and data["high"] is not None and data["low"] is not None:
|
||||
high = Decimal(str(data["high"]))
|
||||
low = Decimal(str(data["low"]))
|
||||
return sum([high, low]) / 2
|
||||
else:
|
||||
elif type in data and data[type] is not None:
|
||||
return Decimal(str(data[type]))
|
||||
else:
|
||||
return None
|
||||
|
||||
def _output_pair(self, base, quote):
|
||||
if base.startswith("id=") or quote.startswith("id="):
|
||||
symbols = {i["id"]: (i["symbol"] or i["code"]) for i in self._symbol_data()}
|
||||
def _output_pair(self, base, quote, data):
|
||||
data_base = data["symbol"]
|
||||
|
||||
output_base = symbols[int(base[3:])] if base.startswith("id=") else base
|
||||
output_quote = symbols[int(quote[3:])] if quote.startswith("id=") else quote
|
||||
symbols = {i["id"]: (i["symbol"] or i["code"]) for i in self._symbol_data()}
|
||||
|
||||
data_quote = None
|
||||
if len(data["quotes"]) > 0:
|
||||
data_quote = symbols[int(data["quotes"][0]["quote"]["name"])]
|
||||
|
||||
lookup_quote = None
|
||||
if quote.startswith("ID="):
|
||||
lookup_quote = symbols[int(quote[3:])]
|
||||
|
||||
output_base = data_base
|
||||
output_quote = data_quote or lookup_quote or quote
|
||||
|
||||
return (output_base, output_quote)
|
||||
|
||||
def _symbol_data(self):
|
||||
fiat_url = "https://web-api.coinmarketcap.com/v1/fiat/map?include_metals=true"
|
||||
fiat_res = self.log_curl(requests.get(fiat_url))
|
||||
fiat = json.loads(fiat_res.content)
|
||||
crypto_url = (
|
||||
"https://web-api.coinmarketcap.com/v1/cryptocurrency/map?sort=cmc_rank"
|
||||
def _id_from_symbol(self, symbol, series):
|
||||
for i in self._symbol_data():
|
||||
if i["symbol"] == symbol:
|
||||
return i["id"]
|
||||
raise exceptions.InvalidPair(
|
||||
series.base, series.quote, self, f"Invalid symbol '{symbol}'."
|
||||
)
|
||||
crypto_res = self.log_curl(requests.get(crypto_url))
|
||||
crypto = json.loads(crypto_res.content)
|
||||
return crypto["data"] + fiat["data"]
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _symbol_data(self):
|
||||
|
||||
base_url = "https://api.coinmarketcap.com/data-api/v1/"
|
||||
crypto_url = f"{base_url}cryptocurrency/map?sort=cmc_rank"
|
||||
|
||||
crypto = self._get_json_data(crypto_url)
|
||||
|
||||
# fmt: off
|
||||
fiat = [
|
||||
{"id": 2781, "symbol": "USD", "name": "United States Dollar"},
|
||||
{"id": 3526, "symbol": "ALL", "name": "Albanian Lek"},
|
||||
{"id": 3537, "symbol": "DZD", "name": "Algerian Dinar"},
|
||||
{"id": 2821, "symbol": "ARS", "name": "Argentine Peso"},
|
||||
{"id": 3527, "symbol": "AMD", "name": "Armenian Dram"},
|
||||
{"id": 2782, "symbol": "AUD", "name": "Australian Dollar"},
|
||||
{"id": 3528, "symbol": "AZN", "name": "Azerbaijani Manat"},
|
||||
{"id": 3531, "symbol": "BHD", "name": "Bahraini Dinar"},
|
||||
{"id": 3530, "symbol": "BDT", "name": "Bangladeshi Taka"},
|
||||
{"id": 3533, "symbol": "BYN", "name": "Belarusian Ruble"},
|
||||
{"id": 3532, "symbol": "BMD", "name": "Bermudan Dollar"},
|
||||
{"id": 2832, "symbol": "BOB", "name": "Bolivian Boliviano"},
|
||||
{"id": 3529, "symbol": "BAM", "name": "Bosnia-Herzegovina Convertible Mark"}, # noqa: E501
|
||||
{"id": 2783, "symbol": "BRL", "name": "Brazilian Real"},
|
||||
{"id": 2814, "symbol": "BGN", "name": "Bulgarian Lev"},
|
||||
{"id": 3549, "symbol": "KHR", "name": "Cambodian Riel"},
|
||||
{"id": 2784, "symbol": "CAD", "name": "Canadian Dollar"},
|
||||
{"id": 2786, "symbol": "CLP", "name": "Chilean Peso"},
|
||||
{"id": 2787, "symbol": "CNY", "name": "Chinese Yuan"},
|
||||
{"id": 2820, "symbol": "COP", "name": "Colombian Peso"},
|
||||
{"id": 3534, "symbol": "CRC", "name": "Costa Rican Colón"},
|
||||
{"id": 2815, "symbol": "HRK", "name": "Croatian Kuna"},
|
||||
{"id": 3535, "symbol": "CUP", "name": "Cuban Peso"},
|
||||
{"id": 2788, "symbol": "CZK", "name": "Czech Koruna"},
|
||||
{"id": 2789, "symbol": "DKK", "name": "Danish Krone"},
|
||||
{"id": 3536, "symbol": "DOP", "name": "Dominican Peso"},
|
||||
{"id": 3538, "symbol": "EGP", "name": "Egyptian Pound"},
|
||||
{"id": 2790, "symbol": "EUR", "name": "Euro"},
|
||||
{"id": 3539, "symbol": "GEL", "name": "Georgian Lari"},
|
||||
{"id": 3540, "symbol": "GHS", "name": "Ghanaian Cedi"},
|
||||
{"id": 3541, "symbol": "GTQ", "name": "Guatemalan Quetzal"},
|
||||
{"id": 3542, "symbol": "HNL", "name": "Honduran Lempira"},
|
||||
{"id": 2792, "symbol": "HKD", "name": "Hong Kong Dollar"},
|
||||
{"id": 2793, "symbol": "HUF", "name": "Hungarian Forint"},
|
||||
{"id": 2818, "symbol": "ISK", "name": "Icelandic Króna"},
|
||||
{"id": 2796, "symbol": "INR", "name": "Indian Rupee"},
|
||||
{"id": 2794, "symbol": "IDR", "name": "Indonesian Rupiah"},
|
||||
{"id": 3544, "symbol": "IRR", "name": "Iranian Rial"},
|
||||
{"id": 3543, "symbol": "IQD", "name": "Iraqi Dinar"},
|
||||
{"id": 2795, "symbol": "ILS", "name": "Israeli New Shekel"},
|
||||
{"id": 3545, "symbol": "JMD", "name": "Jamaican Dollar"},
|
||||
{"id": 2797, "symbol": "JPY", "name": "Japanese Yen"},
|
||||
{"id": 3546, "symbol": "JOD", "name": "Jordanian Dinar"},
|
||||
{"id": 3551, "symbol": "KZT", "name": "Kazakhstani Tenge"},
|
||||
{"id": 3547, "symbol": "KES", "name": "Kenyan Shilling"},
|
||||
{"id": 3550, "symbol": "KWD", "name": "Kuwaiti Dinar"},
|
||||
{"id": 3548, "symbol": "KGS", "name": "Kyrgystani Som"},
|
||||
{"id": 3552, "symbol": "LBP", "name": "Lebanese Pound"},
|
||||
{"id": 3556, "symbol": "MKD", "name": "Macedonian Denar"},
|
||||
{"id": 2800, "symbol": "MYR", "name": "Malaysian Ringgit"},
|
||||
{"id": 2816, "symbol": "MUR", "name": "Mauritian Rupee"},
|
||||
{"id": 2799, "symbol": "MXN", "name": "Mexican Peso"},
|
||||
{"id": 3555, "symbol": "MDL", "name": "Moldovan Leu"},
|
||||
{"id": 3558, "symbol": "MNT", "name": "Mongolian Tugrik"},
|
||||
{"id": 3554, "symbol": "MAD", "name": "Moroccan Dirham"},
|
||||
{"id": 3557, "symbol": "MMK", "name": "Myanma Kyat"},
|
||||
{"id": 3559, "symbol": "NAD", "name": "Namibian Dollar"},
|
||||
{"id": 3561, "symbol": "NPR", "name": "Nepalese Rupee"},
|
||||
{"id": 2811, "symbol": "TWD", "name": "New Taiwan Dollar"},
|
||||
{"id": 2802, "symbol": "NZD", "name": "New Zealand Dollar"},
|
||||
{"id": 3560, "symbol": "NIO", "name": "Nicaraguan Córdoba"},
|
||||
{"id": 2819, "symbol": "NGN", "name": "Nigerian Naira"},
|
||||
{"id": 2801, "symbol": "NOK", "name": "Norwegian Krone"},
|
||||
{"id": 3562, "symbol": "OMR", "name": "Omani Rial"},
|
||||
{"id": 2804, "symbol": "PKR", "name": "Pakistani Rupee"},
|
||||
{"id": 3563, "symbol": "PAB", "name": "Panamanian Balboa"},
|
||||
{"id": 2822, "symbol": "PEN", "name": "Peruvian Sol"},
|
||||
{"id": 2803, "symbol": "PHP", "name": "Philippine Peso"},
|
||||
{"id": 2805, "symbol": "PLN", "name": "Polish Złoty"},
|
||||
{"id": 2791, "symbol": "GBP", "name": "Pound Sterling"},
|
||||
{"id": 3564, "symbol": "QAR", "name": "Qatari Rial"},
|
||||
{"id": 2817, "symbol": "RON", "name": "Romanian Leu"},
|
||||
{"id": 2806, "symbol": "RUB", "name": "Russian Ruble"},
|
||||
{"id": 3566, "symbol": "SAR", "name": "Saudi Riyal"},
|
||||
{"id": 3565, "symbol": "RSD", "name": "Serbian Dinar"},
|
||||
{"id": 2808, "symbol": "SGD", "name": "Singapore Dollar"},
|
||||
{"id": 2812, "symbol": "ZAR", "name": "South African Rand"},
|
||||
{"id": 2798, "symbol": "KRW", "name": "South Korean Won"},
|
||||
{"id": 3567, "symbol": "SSP", "name": "South Sudanese Pound"},
|
||||
{"id": 3573, "symbol": "VES", "name": "Sovereign Bolivar"},
|
||||
{"id": 3553, "symbol": "LKR", "name": "Sri Lankan Rupee"},
|
||||
{"id": 2807, "symbol": "SEK", "name": "Swedish Krona"},
|
||||
{"id": 2785, "symbol": "CHF", "name": "Swiss Franc"},
|
||||
{"id": 2809, "symbol": "THB", "name": "Thai Baht"},
|
||||
{"id": 3569, "symbol": "TTD", "name": "Trinidad and Tobago Dollar"},
|
||||
{"id": 3568, "symbol": "TND", "name": "Tunisian Dinar"},
|
||||
{"id": 2810, "symbol": "TRY", "name": "Turkish Lira"},
|
||||
{"id": 3570, "symbol": "UGX", "name": "Ugandan Shilling"},
|
||||
{"id": 2824, "symbol": "UAH", "name": "Ukrainian Hryvnia"},
|
||||
{"id": 2813, "symbol": "AED", "name": "United Arab Emirates Dirham"},
|
||||
{"id": 3571, "symbol": "UYU", "name": "Uruguayan Peso"},
|
||||
{"id": 3572, "symbol": "UZS", "name": "Uzbekistan Som"},
|
||||
{"id": 2823, "symbol": "VND", "name": "Vietnamese Dong"},
|
||||
]
|
||||
metals = [
|
||||
{"id": 3575, "symbol": "XAU", "name": "Gold Troy Ounce"},
|
||||
{"id": 3574, "symbol": "XAG", "name": "Silver Troy Ounce"},
|
||||
{"id": 3577, "symbol": "XPT", "name": "Platinum Ounce"},
|
||||
{"id": 3576, "symbol": "XPD", "name": "Palladium Ounce"},
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
return fiat + metals + crypto
|
||||
|
||||
    def _get_json_data(self, url, params={}):
        try:
            response = self.log_curl(requests.get(url, params=params))
        except Exception as e:
            raise exceptions.RequestError(str(e)) from e

        try:
            response.raise_for_status()
        except Exception as e:
            raise exceptions.BadResponse(str(e)) from e

        try:
            parsed = json.loads(response.content)
        except Exception as e:
            raise exceptions.ResponseParsingError(str(e)) from e

        if type(parsed) is not dict or "data" not in parsed:
            raise exceptions.ResponseParsingError("Unexpected content.")

        elif len(parsed["data"]) == 0:
            raise exceptions.ResponseParsingError("Empty data section.")

        return parsed["data"]

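A standalone sketch (not part of the diff) of the envelope check this helper applies, run against a made-up response body:

    import json

    payload = b'{"data": [{"id": 1, "symbol": "BTC"}]}'  # made-up response body
    parsed = json.loads(payload)
    ok = type(parsed) is dict and "data" in parsed and len(parsed["data"]) > 0  # True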
@@ -5,7 +5,7 @@ from decimal import Decimal
 import requests
 from lxml import etree
 
-from pricehist import isocurrencies
+from pricehist import exceptions, isocurrencies
 from pricehist.price import Price
 
 from .basesource import BaseSource
 
@@ -34,13 +34,17 @@ class ECB(BaseSource):
         return ""
 
     def symbols(self):
-        root = self._data(more_than_90_days=True)
-        nodes = root.cssselect("[currency]")
-        currencies = sorted(set([n.attrib["currency"] for n in nodes]))
+        quotes = self._quotes()
         iso = isocurrencies.by_code()
-        return [(f"EUR/{c}", f"Euro against {iso[c].name}") for c in currencies]
+        return [
+            (f"EUR/{c}", f"Euro against {iso[c].name if c in iso else c}")
+            for c in quotes
+        ]
 
     def fetch(self, series):
         if series.base != "EUR" or not series.quote:  # EUR is the only valid base.
             raise exceptions.InvalidPair(series.base, series.quote, self)
 
         almost_90_days_ago = (datetime.now().date() - timedelta(days=85)).isoformat()
         root = self._data(series.start < almost_90_days_ago)
 
@@ -50,12 +54,24 @@ class ECB(BaseSource):
             for row in day.cssselect(f"[currency='{series.quote}']"):
                 rate = Decimal(row.attrib["rate"])
                 all_rows.insert(0, (date, rate))
 
+        if not all_rows and series.quote not in self._quotes():
+            raise exceptions.InvalidPair(series.base, series.quote, self)
+
         selected = [
             Price(d, r) for d, r in all_rows if d >= series.start and d <= series.end
         ]
 
         return dataclasses.replace(series, prices=selected)
 
+    def _quotes(self):
+        root = self._data(more_than_90_days=True)
+        nodes = root.cssselect("[currency]")
+        quotes = sorted(set([n.attrib["currency"] for n in nodes]))
+        if not quotes:
+            raise exceptions.ResponseParsingError("Expected data not found")
+        return quotes
+
     def _data(self, more_than_90_days=False):
         url_base = "https://www.ecb.europa.eu/stats/eurofxref"
         if more_than_90_days:
@@ -63,6 +79,19 @@ class ECB(BaseSource):
         else:
             source_url = f"{url_base}/eurofxref-hist-90d.xml"  # last 90 days
 
-        response = self.log_curl(requests.get(source_url))
-        root = etree.fromstring(response.content)
+        try:
+            response = self.log_curl(requests.get(source_url))
+        except Exception as e:
+            raise exceptions.RequestError(str(e)) from e
+
+        try:
+            response.raise_for_status()
+        except Exception as e:
+            raise exceptions.BadResponse(str(e)) from e
+
+        try:
+            root = etree.fromstring(response.content)
+        except Exception as e:
+            raise exceptions.ResponseParsingError(str(e)) from e
+
         return root
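A rough, self-contained sketch (not part of the diff) of the attribute lookups used in _quotes() and fetch() above, run against a simplified eurofxref-style document; the real feed nests Cube elements inside namespaced envelope elements, which the wildcard element match here sidesteps:

    from lxml import etree

    xml = b'<root><Cube time="2021-01-04"><Cube currency="JPY" rate="126.62"/></Cube></root>'
    root = etree.fromstring(xml)
    rates = {n.attrib["currency"]: n.attrib["rate"] for n in root.xpath("//*[@currency]")}  # {'JPY': '126.62'}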
|
122  src/pricehist/sources/exchangeratehost.py  Normal file
|
@ -0,0 +1,122 @@
|
|||
import dataclasses
|
||||
import json
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
|
||||
from .basesource import BaseSource
|
||||
|
||||
|
||||
class ExchangeRateHost(BaseSource):
|
||||
def id(self):
|
||||
return "exchangeratehost"
|
||||
|
||||
def name(self):
|
||||
return "exchangerate.host Exchange rates API"
|
||||
|
||||
def description(self):
|
||||
return (
|
||||
"Exchange rates API is a simple and lightweight free service for "
|
||||
"current and historical foreign exchange rates & crypto exchange "
|
||||
"rates."
|
||||
)
|
||||
|
||||
def source_url(self):
|
||||
return "https://exchangerate.host/"
|
||||
|
||||
def start(self):
|
||||
return "1999-01-01"
|
||||
|
||||
def types(self):
|
||||
return ["close"]
|
||||
|
||||
def notes(self):
|
||||
return ""
|
||||
|
||||
def symbols(self):
|
||||
url = "https://api.coindesk.com/v1/bpi/supported-currencies.json"
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
data = json.loads(response.content)
|
||||
relevant = [i for i in data if i["currency"] not in ["BTC", "XBT"]]
|
||||
results = [
|
||||
(f"BTC/{i['currency']}", f"Bitcoin against {i['country']}")
|
||||
for i in sorted(relevant, key=lambda i: i["currency"])
|
||||
]
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
if not results:
|
||||
raise exceptions.ResponseParsingError("Expected data not found")
|
||||
else:
|
||||
return results
|
||||
|
||||
def fetch(self, series):
|
||||
if series.base != "BTC" or series.quote in ["BTC", "XBT"]:
|
||||
# BTC is the only valid base.
|
||||
# BTC as the quote will return BTC/USD, which we don't want.
|
||||
# XBT as the quote will fail with HTTP status 500.
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
|
||||
data = self._data(series)
|
||||
|
||||
prices = []
|
||||
for (d, v) in data.get("bpi", {}).items():
|
||||
prices.append(Price(d, Decimal(str(v))))
|
||||
|
||||
return dataclasses.replace(series, prices=prices)
|
||||
|
||||
def _data(self, series):
|
||||
url = "https://api.coindesk.com/v1/bpi/historical/close.json"
|
||||
params = {
|
||||
"currency": series.quote,
|
||||
"start": series.start,
|
||||
"end": series.end,
|
||||
}
|
||||
|
||||
try:
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
except Exception as e:
|
||||
raise exceptions.RequestError(str(e)) from e
|
||||
|
||||
code = response.status_code
|
||||
text = response.text
|
||||
if code == 404 and "currency was not found" in text:
|
||||
raise exceptions.InvalidPair(series.base, series.quote, self)
|
||||
elif code == 404 and "only covers data from" in text:
|
||||
raise exceptions.BadResponse(text)
|
||||
elif code == 404 and "end date is before" in text and series.end < series.start:
|
||||
raise exceptions.BadResponse("End date is before start date.")
|
||||
elif code == 404 and "end date is before" in text:
|
||||
raise exceptions.BadResponse("The start date must be in the past.")
|
||||
elif code == 500 and "No results returned from database" in text:
|
||||
raise exceptions.BadResponse(
|
||||
"No results returned from database. This can happen when data "
|
||||
"for a valid quote currency (e.g. CUP) doesn't go all the way "
|
||||
"back to the start date, and potentially for other reasons."
|
||||
)
|
||||
else:
|
||||
try:
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise exceptions.BadResponse(str(e)) from e
|
||||
|
||||
try:
|
||||
result = json.loads(response.content)
|
||||
except Exception as e:
|
||||
raise exceptions.ResponseParsingError(str(e)) from e
|
||||
|
||||
return result
|
|
@@ -1,12 +1,12 @@
 import csv
 import dataclasses
 import json
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from decimal import Decimal
 
 import requests
 
 from pricehist import __version__, exceptions
 from pricehist.price import Price
 
 from .basesource import BaseSource
 
@@ -29,7 +29,10 @@ class Yahoo(BaseSource):
         return "https://finance.yahoo.com/"
 
     def start(self):
-        return "1970-01-01"
+        # The "Download historical data in Yahoo Finance" page says
+        # "Historical prices usually don't go back earlier than 1970", but
+        # several do. Examples going back to 1962-01-02 include ED and IBM.
+        return "1962-01-02"
 
     def types(self):
         return ["adjclose", "open", "high", "low", "close", "mid"]
 
@@ -54,7 +57,7 @@ class Yahoo(BaseSource):
         return (
             "Find the symbol of interest on https://finance.yahoo.com/ and use "
             "that as the PAIR in your pricehist command. Prices for each symbol "
-            "are given in its native currency."
+            "are quoted in its native currency."
         )
 
     def symbols(self):
 
@@ -62,59 +65,109 @@ class Yahoo(BaseSource):
         return []
 
     def fetch(self, series):
-        spark, history = self._data(series)
+        if series.quote:
+            raise exceptions.InvalidPair(
+                series.base, series.quote, self, "Don't specify the quote currency."
+            )
 
-        output_quote = spark["spark"]["result"][0]["response"][0]["meta"]["currency"]
+        data = self._data(series)
+        quote = data["chart"]["result"][0]["meta"]["currency"]
+        offset = data["chart"]["result"][0]["meta"]["gmtoffset"]
+
+        timestamps = data["chart"]["result"][0]["timestamp"]
+        adjclose_data = data["chart"]["result"][0]["indicators"]["adjclose"][0]
+        rest_data = data["chart"]["result"][0]["indicators"]["quote"][0]
+        amounts = {**adjclose_data, **rest_data}
 
         prices = [
-            Price(row["date"], amount)
-            for row in history
-            if (amount := self._amount(row, series.type))
+            Price(date, amount)
+            for i in range(len(timestamps))
+            if (date := self._ts_to_date(timestamps[i] + offset)) <= series.end
+            if (amount := self._amount(amounts, series.type, i)) is not None
         ]
 
-        return dataclasses.replace(series, quote=output_quote, prices=prices)
+        return dataclasses.replace(series, quote=quote, prices=prices)
 
-    def _amount(self, row, type):
-        if type != "mid" and row[type] != "null":
-            return Decimal(row[type])
-        elif type == "mid" and row["high"] != "null" and row["low"] != "null":
-            return sum([Decimal(row["high"]), Decimal(row["low"])]) / 2
+    def _ts_to_date(self, ts) -> str:
+        return datetime.fromtimestamp(ts, tz=timezone.utc).date().isoformat()
+
+    def _amount(self, amounts, type, i):
+        if type == "mid" and amounts["high"] != "null" and amounts["low"] != "null":
+            return sum([Decimal(amounts["high"][i]), Decimal(amounts["low"][i])]) / 2
+        elif amounts[type] != "null" and amounts[type][i] is not None:
+            return Decimal(amounts[type][i])
         else:
             return None
 
-    def _data(self, series) -> (dict, csv.DictReader):
-        base_url = "https://query1.finance.yahoo.com/v7/finance"
+    def _data(self, series) -> dict:
+        base_url = "https://query1.finance.yahoo.com/v8/finance/chart"
         headers = {"User-Agent": f"pricehist/{__version__}"}
+        url = f"{base_url}/{series.base}"
 
-        spark_url = f"{base_url}/spark"
-        spark_params = {
-            "symbols": series.base,
-            "range": "1d",
-            "interval": "1d",
-            "indicators": "close",
-            "includeTimestamps": "false",
-            "includePrePost": "false",
-        }
-        spark_response = self.log_curl(requests.get(spark_url, params=spark_params))
-        spark = json.loads(spark_response.content)
-
-        start_ts = int(datetime.strptime(series.start, "%Y-%m-%d").timestamp())
-        end_ts = int(datetime.strptime(series.end, "%Y-%m-%d").timestamp()) + (
+        start_ts = int(
+            datetime.strptime(series.start, "%Y-%m-%d")
+            .replace(tzinfo=timezone.utc)
+            .timestamp()
+        )
+        end_ts = int(
+            datetime.strptime(series.end, "%Y-%m-%d")
+            .replace(tzinfo=timezone.utc)
+            .timestamp()
+        ) + (
             24 * 60 * 60
-        )  # round up to include the last day
+        )  # some symbols require padding on the end timestamp
 
-        history_url = f"{base_url}/download/{series.base}"
-        history_params = {
+        params = {
+            "symbol": series.base,
             "period1": start_ts,
             "period2": end_ts,
             "interval": "1d",
-            "events": "history",
+            "events": "capitalGain%7Cdiv%7Csplit",
             "includeAdjustedClose": "true",
+            "formatted": "true",
+            "userYfid": "true",
+            "lang": "en-US",
+            "region": "US",
         }
-        history_response = self.log_curl(
-            requests.get(history_url, params=history_params)
-        )
-        history_lines = history_response.content.decode("utf-8").splitlines()
-        history_lines[0] = history_lines[0].lower().replace(" ", "")
-        history = csv.DictReader(history_lines, delimiter=",")
 
-        return (spark, history)
+        try:
+            response = self.log_curl(requests.get(url, params=params, headers=headers))
+        except Exception as e:
+            raise exceptions.RequestError(str(e)) from e
+
+        code = response.status_code
+        text = response.text
+
+        if code == 404 and "No data found, symbol may be delisted" in text:
+            raise exceptions.InvalidPair(
+                series.base, series.quote, self, "Symbol not found."
+            )
+        elif code == 400 and "Data doesn't exist" in text:
+            raise exceptions.BadResponse(
+                "No data for the given interval. Try requesting a larger interval."
+            )
+        elif code == 404 and "Timestamp data missing" in text:
+            raise exceptions.BadResponse(
+                "Data missing. The given interval may be for a gap in the data "
+                "such as a weekend or holiday. Try requesting a larger interval."
+            )
+
+        try:
+            response.raise_for_status()
+        except Exception as e:
+            raise exceptions.BadResponse(str(e)) from e
+
+        try:
+            data = json.loads(response.content)
+        except Exception as e:
+            raise exceptions.ResponseParsingError(
+                "The data couldn't be parsed. "
+            ) from e
+
+        if "timestamp" not in data["chart"]["result"][0]:
+            raise exceptions.BadResponse(
+                "No data for the given interval. "
+                "There may be a problem with the symbol or the interval."
            )
+
+        return data
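To make the timestamp handling above concrete, a small sketch (not part of the diff) of how a chart timestamp plus the reported gmtoffset becomes a calendar date, mirroring _ts_to_date(); the values are made up:

    from datetime import datetime, timezone

    ts, offset = 1609718400, -18000  # hypothetical chart timestamp and gmtoffset
    date = datetime.fromtimestamp(ts + offset, tz=timezone.utc).date().isoformat()  # '2021-01-03'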
|
169  tests/live.sh  Executable file
|
@ -0,0 +1,169 @@
|
|||
#!/bin/bash
|
||||
|
||||
# These are basic happy path tests that run pricehist from the command line and
|
||||
# confirm that the results come out as expected. They help ensure that the main
|
||||
# endpoints for each source are still working.
|
||||
|
||||
# Run this from the project root.
|
||||
|
||||
export ALPHAVANTAGE_API_KEY="TEST_KEY_$RANDOM"
|
||||
cmd_prefix="poetry run"
|
||||
|
||||
passed=0
|
||||
failed=0
|
||||
skipped=0
|
||||
|
||||
run_test(){
|
||||
name=$1
|
||||
cmd=$2
|
||||
expected=$3
|
||||
echo "TEST: $name"
|
||||
echo " Action: $cmd"
|
||||
echo -n " Result: "
|
||||
full_cmd="$cmd_prefix $cmd"
|
||||
actual=$($full_cmd 2>&1)
|
||||
if [[ "$actual" == "$expected" ]]; then
|
||||
passed=$((passed+1))
|
||||
echo "passed, output as expected"
|
||||
else
|
||||
failed=$((failed+1))
|
||||
echo "failed, output differs as follows..."
|
||||
echo
|
||||
diff <(echo "$expected") <(echo "$actual")
|
||||
fi
|
||||
echo
|
||||
}
|
||||
|
||||
skip_test(){
|
||||
name=$1
|
||||
cmd=$2
|
||||
echo "TEST: $name"
|
||||
echo " Action: $cmd"
|
||||
echo " Result: SKIPPED!"
|
||||
skipped=$((skipped+1))
|
||||
echo
|
||||
}
|
||||
|
||||
report(){
|
||||
total=$((passed+failed))
|
||||
if [[ "$skipped" -eq "0" ]]; then
|
||||
skipped_str="none"
|
||||
else
|
||||
skipped_str="$skipped"
|
||||
fi
|
||||
if [[ "$failed" -eq "0" ]]; then
|
||||
echo "SUMMARY: $passed tests passed, none failed, $skipped_str skipped"
|
||||
else
|
||||
echo "SUMMARY: $failed/$total tests failed, $skipped_str skipped"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
name="Alpha Vantage stocks"
|
||||
cmd="pricehist fetch alphavantage TSLA -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,TSLA,USD,729.7700,alphavantage,close
|
||||
2021-01-05,TSLA,USD,735.1100,alphavantage,close
|
||||
2021-01-06,TSLA,USD,755.9800,alphavantage,close
|
||||
2021-01-07,TSLA,USD,816.0400,alphavantage,close
|
||||
2021-01-08,TSLA,USD,880.0200,alphavantage,close
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="Alpha Vantage physical currency"
|
||||
cmd="pricehist fetch alphavantage AUD/EUR -s 2021-01-11 -e 2021-01-14"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-11,AUD,EUR,0.63374,alphavantage,close
|
||||
2021-01-12,AUD,EUR,0.63684,alphavantage,close
|
||||
2021-01-13,AUD,EUR,0.63686,alphavantage,close
|
||||
2021-01-14,AUD,EUR,0.63984,alphavantage,close
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="Alpha Vantage digital currency"
|
||||
cmd="pricehist fetch alphavantage BTC/USD -s 2024-07-01 -e 2024-07-05"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2024-07-01,BTC,USD,62830.13000000,alphavantage,close
|
||||
2024-07-02,BTC,USD,62040.22000000,alphavantage,close
|
||||
2024-07-03,BTC,USD,60145.01000000,alphavantage,close
|
||||
2024-07-04,BTC,USD,57042.14000000,alphavantage,close
|
||||
2024-07-05,BTC,USD,56639.43000000,alphavantage,close
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="Bank of Canada"
|
||||
cmd="pricehist fetch bankofcanada CAD/USD -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,CAD,USD,0.7843,bankofcanada,default
|
||||
2021-01-05,CAD,USD,0.7870,bankofcanada,default
|
||||
2021-01-06,CAD,USD,0.7883,bankofcanada,default
|
||||
2021-01-07,CAD,USD,0.7870,bankofcanada,default
|
||||
2021-01-08,CAD,USD,0.7871,bankofcanada,default
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="Coinbase Pro"
|
||||
cmd="pricehist fetch coinbasepro BTC/EUR -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,BTC,EUR,24127,coinbasepro,mid
|
||||
2021-01-05,BTC,EUR,26201.31,coinbasepro,mid
|
||||
2021-01-06,BTC,EUR,28527.005,coinbasepro,mid
|
||||
2021-01-07,BTC,EUR,31208.49,coinbasepro,mid
|
||||
2021-01-08,BTC,EUR,32019,coinbasepro,mid
|
||||
END
|
||||
skip_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="CoinDesk Bitcoin Price Index v1"
|
||||
cmd="pricehist fetch coindeskbpi BTC/USD -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,BTC,USD,31431.6123,coindeskbpi,close
|
||||
2021-01-05,BTC,USD,34433.6065,coindeskbpi,close
|
||||
2021-01-06,BTC,USD,36275.7563,coindeskbpi,close
|
||||
2021-01-07,BTC,USD,39713.5079,coindeskbpi,close
|
||||
2021-01-08,BTC,USD,40519.4486,coindeskbpi,close
|
||||
END
|
||||
skip_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="CoinMarketCap"
|
||||
cmd="pricehist fetch coinmarketcap BTC/EUR -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,BTC,EUR,25322.5034586073,coinmarketcap,mid
|
||||
2021-01-05,BTC,EUR,26318.9928757682,coinmarketcap,mid
|
||||
2021-01-06,BTC,EUR,28570.9945210226,coinmarketcap,mid
|
||||
2021-01-07,BTC,EUR,31200.8342706036,coinmarketcap,mid
|
||||
2021-01-08,BTC,EUR,32157.05279624555,coinmarketcap,mid
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="European Central Bank"
|
||||
cmd="pricehist fetch ecb EUR/JPY -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,EUR,JPY,126.62,ecb,reference
|
||||
2021-01-05,EUR,JPY,126.25,ecb,reference
|
||||
2021-01-06,EUR,JPY,127.03,ecb,reference
|
||||
2021-01-07,EUR,JPY,127.13,ecb,reference
|
||||
2021-01-08,EUR,JPY,127.26,ecb,reference
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
name="Yahoo! Finance"
|
||||
cmd="pricehist fetch yahoo TSLA -s 2021-01-04 -e 2021-01-08"
|
||||
read -r -d '' expected <<END
|
||||
date,base,quote,amount,source,type
|
||||
2021-01-04,TSLA,USD,243.2566680908203125,yahoo,adjclose
|
||||
2021-01-05,TSLA,USD,245.0366668701171875,yahoo,adjclose
|
||||
2021-01-06,TSLA,USD,251.9933319091796875,yahoo,adjclose
|
||||
2021-01-07,TSLA,USD,272.013336181640625,yahoo,adjclose
|
||||
2021-01-08,TSLA,USD,293.339996337890625,yahoo,adjclose
|
||||
END
|
||||
run_test "$name" "$cmd" "$expected"
|
||||
|
||||
report
|
0  tests/pricehist/outputs/__init__.py  Normal file
44  tests/pricehist/outputs/test_beancount.py  Normal file
|
@ -0,0 +1,44 @@
|
|||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
|
||||
from pricehist.format import Format
|
||||
from pricehist.outputs.beancount import Beancount
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def out():
|
||||
return Beancount()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series():
|
||||
prices = [
|
||||
Price("2021-01-01", Decimal("24139.4648")),
|
||||
Price("2021-01-02", Decimal("26533.576")),
|
||||
Price("2021-01-03", Decimal("27001.2846")),
|
||||
]
|
||||
return Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03", prices)
|
||||
|
||||
|
||||
def test_format_basics(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
result = out.format(series, source, Format())
|
||||
assert result == (
|
||||
"2021-01-01 price BTC 24139.4648 EUR\n"
|
||||
"2021-01-02 price BTC 26533.576 EUR\n"
|
||||
"2021-01-03 price BTC 27001.2846 EUR\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_custom(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
fmt = Format(base="XBT", quote="EURO", thousands=".", decimal=",", datesep="/")
|
||||
result = out.format(series, source, fmt)
|
||||
assert result == (
|
||||
"2021/01/01 price XBT 24.139,4648 EURO\n"
|
||||
"2021/01/02 price XBT 26.533,576 EURO\n"
|
||||
"2021/01/03 price XBT 27.001,2846 EURO\n"
|
||||
)
|
50  tests/pricehist/outputs/test_csv.py  Normal file
|
@ -0,0 +1,50 @@
|
|||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
|
||||
from pricehist.format import Format
|
||||
from pricehist.outputs.csv import CSV
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def out():
|
||||
return CSV()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series():
|
||||
prices = [
|
||||
Price("2021-01-01", Decimal("24139.4648")),
|
||||
Price("2021-01-02", Decimal("26533.576")),
|
||||
Price("2021-01-03", Decimal("27001.2846")),
|
||||
]
|
||||
return Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03", prices)
|
||||
|
||||
|
||||
def test_format_basics(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
result = out.format(series, source, Format())
|
||||
assert result == (
|
||||
"date,base,quote,amount,source,type\n"
|
||||
"2021-01-01,BTC,EUR,24139.4648,sourceid,close\n"
|
||||
"2021-01-02,BTC,EUR,26533.576,sourceid,close\n"
|
||||
"2021-01-03,BTC,EUR,27001.2846,sourceid,close\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_custom(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
fmt = Format(
|
||||
base="XBT", quote="€", thousands=".", decimal=",", datesep="/", csvdelim="/"
|
||||
)
|
||||
result = out.format(series, source, fmt)
|
||||
assert result == (
|
||||
"date/base/quote/amount/source/type\n"
|
||||
'"2021/01/01"/XBT/€/24.139,4648/sourceid/close\n'
|
||||
'"2021/01/02"/XBT/€/26.533,576/sourceid/close\n'
|
||||
'"2021/01/03"/XBT/€/27.001,2846/sourceid/close\n'
|
||||
)
|
140  tests/pricehist/outputs/test_gnucashsql.py  Normal file
|
@ -0,0 +1,140 @@
|
|||
import dataclasses
|
||||
import logging
|
||||
import re
|
||||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
|
||||
from pricehist.format import Format
|
||||
from pricehist.outputs.gnucashsql import GnuCashSQL
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def out():
|
||||
return GnuCashSQL()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series():
|
||||
prices = [
|
||||
Price("2021-01-01", Decimal("24139.4648")),
|
||||
Price("2021-01-02", Decimal("26533.576")),
|
||||
Price("2021-01-03", Decimal("27001.2846")),
|
||||
]
|
||||
return Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03", prices)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src(mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="coindesk")
|
||||
return source
|
||||
|
||||
|
||||
def test_format_base_and_quote(out, series, src):
|
||||
result = out.format(series, src, Format())
|
||||
base, quote = re.findall(r"WHERE mnemonic = (.*) LIMIT", result, re.MULTILINE)
|
||||
assert base == "'BTC'"
|
||||
assert quote == "'EUR'"
|
||||
|
||||
|
||||
def test_format_new_price_values(out, series, src):
|
||||
result = out.format(series, src, Format())
|
||||
values = re.search(
|
||||
r"\(guid, date, base, quote, source, type, "
|
||||
r"value_num, value_denom\) VALUES\n([^;]*);",
|
||||
result,
|
||||
re.MULTILINE,
|
||||
)[1]
|
||||
assert values == (
|
||||
"('0c4c01bd0a252641b806ce46f716f161', '2021-01-01 00:00:00', "
|
||||
"'BTC', 'EUR', 'coindesk', 'close', 241394648, 10000),\n"
|
||||
"('47f895ddfcce18e2421387e0e1b636e9', '2021-01-02 00:00:00', "
|
||||
"'BTC', 'EUR', 'coindesk', 'close', 26533576, 1000),\n"
|
||||
"('0d81630c4ac50c1b9b7c8211bf99c94e', '2021-01-03 00:00:00', "
|
||||
"'BTC', 'EUR', 'coindesk', 'close', 270012846, 10000)\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_customized(out, series, src):
|
||||
fmt = Format(
|
||||
base="XBT",
|
||||
quote="EURO",
|
||||
datesep="/",
|
||||
time="23:59:59",
|
||||
)
|
||||
result = out.format(series, src, fmt)
|
||||
base, quote = re.findall(r"WHERE mnemonic = (.*) LIMIT", result, re.MULTILINE)
|
||||
values = re.search(
|
||||
r"\(guid, date, base, quote, source, type, "
|
||||
r"value_num, value_denom\) VALUES\n([^;]*);",
|
||||
result,
|
||||
re.MULTILINE,
|
||||
)[1]
|
||||
assert base == "'XBT'"
|
||||
assert quote == "'EURO'"
|
||||
assert values == (
|
||||
"('448173eef5dea23cea9ff9d5e8c7b07e', '2021/01/01 23:59:59', "
|
||||
"'XBT', 'EURO', 'coindesk', 'close', 241394648, 10000),\n"
|
||||
"('b6c0f4474c91c50e8f65b47767f874ba', '2021/01/02 23:59:59', "
|
||||
"'XBT', 'EURO', 'coindesk', 'close', 26533576, 1000),\n"
|
||||
"('2937c872cf0672863e11b9f46ee41e09', '2021/01/03 23:59:59', "
|
||||
"'XBT', 'EURO', 'coindesk', 'close', 270012846, 10000)\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_escaping_of_strings(out, series, src):
|
||||
result = out.format(series, src, Format(base="B'tc''n"))
|
||||
base, quote = re.findall(r"WHERE mnemonic = (.*) LIMIT", result, re.MULTILINE)
|
||||
assert base == "'B''tc''''n'"
|
||||
|
||||
|
||||
def test_format_insert_commented_out_if_no_values(out, series, src):
|
||||
empty_series = dataclasses.replace(series, prices=[])
|
||||
result = out.format(empty_series, src, Format())
|
||||
(
|
||||
"-- INSERT INTO new_prices (guid, date, base, quote, source, type, "
|
||||
"value_num, value_denom) VALUES\n"
|
||||
"-- \n"
|
||||
"-- ;\n"
|
||||
) in result
|
||||
|
||||
|
||||
def test_format_warns_about_backslash(out, series, src, caplog):
|
||||
with caplog.at_level(logging.WARNING):
|
||||
out.format(series, src, Format(quote="EU\\RO"))
|
||||
r = caplog.records[0]
|
||||
assert r.levelname == "WARNING"
|
||||
assert "backslashes in strings" in r.message
|
||||
|
||||
|
||||
def test__english_join_other_cases(out):
|
||||
assert out._english_join([]) == ""
|
||||
assert out._english_join(["one"]) == "one"
|
||||
assert out._english_join(["one", "two"]) == "one and two"
|
||||
assert out._english_join(["one", "two", "three"]) == "one, two and three"
|
||||
|
||||
|
||||
def test_format_warns_about_out_of_range_numbers(out, series, src, caplog):
|
||||
too_big_numerator = Decimal("9223372036854.775808")
|
||||
s = dataclasses.replace(series, prices=[Price("2021-01-01", too_big_numerator)])
|
||||
with caplog.at_level(logging.WARNING):
|
||||
out.format(s, src, Format())
|
||||
r = caplog.records[0]
|
||||
assert r.levelname == "WARNING"
|
||||
assert "outside of the int64 range" in r.message
|
||||
|
||||
|
||||
def test__rational_other_exponent_cases(out):
|
||||
assert out._rational(Decimal("9223372036854e6")) == (
|
||||
"9223372036854000000",
|
||||
"1",
|
||||
True,
|
||||
)
|
||||
assert out._rational(Decimal("9223372036854e-6")) == (
|
||||
"9223372036854",
|
||||
"1000000",
|
||||
True,
|
||||
)
|
168  tests/pricehist/outputs/test_json.py  Normal file
|
@ -0,0 +1,168 @@
|
|||
from decimal import Decimal
|
||||
from textwrap import dedent
|
||||
|
||||
import pytest
|
||||
|
||||
from pricehist.format import Format
|
||||
from pricehist.outputs.json import JSON
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def json_out():
|
||||
return JSON()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def jsonl_out():
|
||||
return JSON(jsonl=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series():
|
||||
prices = [
|
||||
Price("2021-01-01", Decimal("24139.4648")),
|
||||
Price("2021-01-02", Decimal("26533.576")),
|
||||
Price("2021-01-03", Decimal("27001.2846")),
|
||||
]
|
||||
return Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03", prices)
|
||||
|
||||
|
||||
def test_format_basics(json_out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
result = json_out.format(series, source, Format())
|
||||
assert (
|
||||
result
|
||||
== dedent(
|
||||
"""
|
||||
[
|
||||
{
|
||||
"date": "2021-01-01",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": "24139.4648",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021-01-02",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": "26533.576",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021-01-03",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": "27001.2846",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
}
|
||||
]
|
||||
"""
|
||||
).strip()
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_basic_jsonl(jsonl_out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
result = jsonl_out.format(series, source, Format())
|
||||
assert (
|
||||
result
|
||||
== dedent(
|
||||
"""
|
||||
{"date": "2021-01-01", "base": "BTC", "quote": "EUR", "amount": "24139.4648", "source": "sourceid", "type": "close"}
|
||||
{"date": "2021-01-02", "base": "BTC", "quote": "EUR", "amount": "26533.576", "source": "sourceid", "type": "close"}
|
||||
{"date": "2021-01-03", "base": "BTC", "quote": "EUR", "amount": "27001.2846", "source": "sourceid", "type": "close"}
|
||||
""" # noqa
|
||||
).strip()
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_custom(json_out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
fmt = Format(base="XBT", quote="€", thousands=".", decimal=",", datesep="/")
|
||||
result = json_out.format(series, source, fmt)
|
||||
assert (
|
||||
result
|
||||
== dedent(
|
||||
"""
|
||||
[
|
||||
{
|
||||
"date": "2021/01/01",
|
||||
"base": "XBT",
|
||||
"quote": "€",
|
||||
"amount": "24.139,4648",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021/01/02",
|
||||
"base": "XBT",
|
||||
"quote": "€",
|
||||
"amount": "26.533,576",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021/01/03",
|
||||
"base": "XBT",
|
||||
"quote": "€",
|
||||
"amount": "27.001,2846",
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
}
|
||||
]
|
||||
"""
|
||||
).strip()
|
||||
+ "\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_numbers(json_out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
source.id = mocker.MagicMock(return_value="sourceid")
|
||||
fmt = Format(jsonnums=True)
|
||||
result = json_out.format(series, source, fmt)
|
||||
assert (
|
||||
result
|
||||
== dedent(
|
||||
"""
|
||||
[
|
||||
{
|
||||
"date": "2021-01-01",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": 24139.4648,
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021-01-02",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": 26533.576,
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
},
|
||||
{
|
||||
"date": "2021-01-03",
|
||||
"base": "BTC",
|
||||
"quote": "EUR",
|
||||
"amount": 27001.2846,
|
||||
"source": "sourceid",
|
||||
"type": "close"
|
||||
}
|
||||
]
|
||||
"""
|
||||
).strip()
|
||||
+ "\n"
|
||||
)
|
52  tests/pricehist/outputs/test_ledger.py  Normal file
|
@ -0,0 +1,52 @@
|
|||
from decimal import Decimal
|
||||
|
||||
import pytest
|
||||
|
||||
from pricehist.format import Format
|
||||
from pricehist.outputs.ledger import Ledger
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def out():
|
||||
return Ledger()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series():
|
||||
prices = [
|
||||
Price("2021-01-01", Decimal("24139.4648")),
|
||||
Price("2021-01-02", Decimal("26533.576")),
|
||||
Price("2021-01-03", Decimal("27001.2846")),
|
||||
]
|
||||
return Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03", prices)
|
||||
|
||||
|
||||
def test_format_basics(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
result = out.format(series, source, Format())
|
||||
assert result == (
|
||||
"P 2021-01-01 00:00:00 BTC 24139.4648 EUR\n"
|
||||
"P 2021-01-02 00:00:00 BTC 26533.576 EUR\n"
|
||||
"P 2021-01-03 00:00:00 BTC 27001.2846 EUR\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_custom(out, series, mocker):
|
||||
source = mocker.MagicMock()
|
||||
fmt = Format(
|
||||
base="XBT",
|
||||
quote="€",
|
||||
time="23:59:59",
|
||||
thousands=".",
|
||||
decimal=",",
|
||||
symbol="left",
|
||||
datesep="/",
|
||||
)
|
||||
result = out.format(series, source, fmt)
|
||||
assert result == (
|
||||
"P 2021/01/01 23:59:59 XBT €24.139,4648\n"
|
||||
"P 2021/01/02 23:59:59 XBT €26.533,576\n"
|
||||
"P 2021/01/03 23:59:59 XBT €27.001,2846\n"
|
||||
)
|
678  tests/pricehist/sources/test_alphavantage.py  Normal file
|
@ -0,0 +1,678 @@
|
|||
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import __version__, exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.alphavantage import AlphaVantage
|
||||
|
||||
api_key_name = "ALPHAVANTAGE_API_KEY"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def mock_settings_env_vars(monkeypatch):
|
||||
value = "NOTAREALKEY12345"
|
||||
if not os.getenv(api_key_name):
|
||||
monkeypatch.setenv(api_key_name, value, prepend=False)
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return AlphaVantage()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
physical_list_url = "https://www.alphavantage.co/physical_currency_list/"
|
||||
digital_list_url = "https://www.alphavantage.co/digital_currency_list/"
|
||||
|
||||
search_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=SYMBOL_SEARCH.*"
|
||||
)
|
||||
stock_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=TIME_SERIES_DAILY&.*"
|
||||
)
|
||||
adj_stock_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=TIME_SERIES_DAILY_ADJUSTED.*"
|
||||
)
|
||||
physical_url = re.compile(r"https://www\.alphavantage\.co/query\?function=FX_DAILY.*")
|
||||
digital_url = re.compile(
|
||||
r"https://www\.alphavantage\.co/query\?function=DIGITAL_CURRENCY_DAILY.*"
|
||||
)
|
||||
|
||||
rate_limit_json = (
|
||||
'{ "Information": "'
|
||||
"Thank you for using Alpha Vantage! Our standard API rate limit is 25 "
|
||||
"requests per day. Please subscribe to any of the premium plans at "
|
||||
"https://www.alphavantage.co/premium/ to instantly remove all daily rate "
|
||||
"limits."
|
||||
'" }'
|
||||
)
|
||||
|
||||
premium_json = (
|
||||
'{ "Information": "Thank you for using Alpha Vantage! This is a premium '
|
||||
"endpoint and there are multiple ways to unlock premium endpoints: (1) "
|
||||
"become a holder of Alpha Vantage Coin (AVC), an Ethereum-based "
|
||||
"cryptocurrency that provides various utility & governance functions "
|
||||
"within the Alpha Vantage ecosystem (AVC mining guide: "
|
||||
"https://www.alphatournament.com/avc_mining_guide/) to unlock all "
|
||||
"premium endpoints, (2) subscribe to any of the premium plans at "
|
||||
"https://www.alphavantage.co/premium/ to instantly unlock all premium "
|
||||
'endpoints" }'
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def physical_list_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "physical-partial.csv").read_text()
|
||||
requests_mock.add(responses.GET, physical_list_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def digital_list_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "digital-partial.csv").read_text()
|
||||
requests_mock.add(responses.GET, digital_list_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def search_ok(requests_mock):
|
||||
text = (Path(os.path.splitext(__file__)[0]) / "search-ibm.json").read_text()
|
||||
requests_mock.add(responses.GET, search_url, body=text, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def search_not_found(requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body='{"bestMatches":[]}', status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def ibm_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "ibm-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, stock_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def ibm_adj_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "ibm-partial-adj.json").read_text()
|
||||
requests_mock.add(responses.GET, adj_stock_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def euraud_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "eur-aud-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, physical_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def btcaud_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "btc-aud-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, digital_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("tsla") == "TSLA"
|
||||
assert src.normalizesymbol("btc") == "BTC"
|
||||
assert src.normalizesymbol("eur") == "EUR"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols_stock_message(src, physical_list_ok, digital_list_ok, caplog):
|
||||
with caplog.at_level(logging.INFO):
|
||||
src.symbols()
|
||||
assert any(["Stock symbols can be discovered" in r.message for r in caplog.records])
|
||||
|
||||
|
||||
def test_symbols(src, physical_list_ok, digital_list_ok):
|
||||
syms = src.symbols()
|
||||
assert ("BTC", "Digital: Bitcoin") in syms
|
||||
assert ("AUD", "Physical: Australian Dollar") in syms
|
||||
assert len(syms) > 2
|
||||
|
||||
|
||||
def test_symbols_digital_network_issue(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
digital_list_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_bad_status(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_no_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, body="NOT CSV", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Symbols data missing." in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_digital_bad_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, digital_list_url, body="A,B,C\na,b,c", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "too many values" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_network_issue(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
physical_list_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_bad_status(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_list_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_physical_no_data(src, digital_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_list_url, body="", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Symbols data missing." in str(e.value)
|
||||
|
||||
|
||||
def test_search(src, search_ok):
|
||||
results = src.search("IBM")
|
||||
req = search_ok.calls[0].request
|
||||
assert req.params["function"] == "SYMBOL_SEARCH"
|
||||
assert req.params["keywords"] == "IBM"
|
||||
assert len(req.params["apikey"]) > 0
|
||||
assert len(results) == 10
|
||||
for expected in [
|
||||
("IBM", "International Business Machines Corp, Equity, United States, USD"),
|
||||
("IBMJ", "iShares iBonds Dec 2021 Term Muni Bond ETF, ETF, United States, USD"),
|
||||
("IBMK", "iShares iBonds Dec 2022 Term Muni Bond ETF, ETF, United States, USD"),
|
||||
("IBM.DEX", "International Business Machines Corporation, Equity, XETRA, EUR"),
|
||||
]:
|
||||
assert expected in results
|
||||
|
||||
|
||||
def test_search_network_issue(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
search_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.search("IBM")
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_status(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.search("IBM")
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_data(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body="NOT JSON", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_json(src, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body="{}", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "Unexpected content." in str(e.value)
|
||||
|
||||
|
||||
def test_search_bad_json_tricky(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET, search_url, body='{"bestMatches": [{}]}', status=200
|
||||
)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.search("IBM")
|
||||
assert "Unexpected content." in str(e.value)
|
||||
|
||||
|
||||
def test_search_rate_limit(src, type, requests_mock):
|
||||
requests_mock.add(responses.GET, search_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_known(src, type, search_ok, ibm_ok):
|
||||
series = src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
search_req = search_ok.calls[0].request
|
||||
stock_req = ibm_ok.calls[1].request
|
||||
assert search_req.params["function"] == "SYMBOL_SEARCH"
|
||||
assert search_req.params["keywords"] == "IBM"
|
||||
assert stock_req.params["function"] == "TIME_SERIES_DAILY"
|
||||
assert stock_req.params["symbol"] == "IBM"
|
||||
assert stock_req.params["outputsize"] == "full"
|
||||
assert (series.base, series.quote) == ("IBM", "USD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("123.94"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("128.53"))
|
||||
|
||||
|
||||
def test_fetch_stock_compact_if_recent(src, type, search_ok, ibm_ok):
|
||||
today = datetime.now().date()
|
||||
start = (today - timedelta(days=30)).isoformat()
|
||||
end = today.isoformat()
|
||||
src.fetch(Series("IBM", "", type, start, end))
|
||||
stock_req = ibm_ok.calls[1].request
|
||||
assert stock_req.params["outputsize"] == "compact"
|
||||
|
||||
|
||||
def test_fetch_stock_requests_logged(src, type, search_ok, ibm_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and "curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 2
|
||||
|
||||
|
||||
def test_fetch_stock_types_all_available(src, search_ok, ibm_ok):
|
||||
cls = src.fetch(Series("IBM", "", "close", "2021-01-04", "2021-01-08"))
|
||||
opn = src.fetch(Series("IBM", "", "open", "2021-01-04", "2021-01-08"))
|
||||
hgh = src.fetch(Series("IBM", "", "high", "2021-01-04", "2021-01-08"))
|
||||
low = src.fetch(Series("IBM", "", "low", "2021-01-04", "2021-01-08"))
|
||||
mid = src.fetch(Series("IBM", "", "mid", "2021-01-04", "2021-01-08"))
|
||||
assert cls.prices[0].amount == Decimal("123.94")
|
||||
assert opn.prices[0].amount == Decimal("125.85")
|
||||
assert hgh.prices[0].amount == Decimal("125.9174")
|
||||
assert low.prices[0].amount == Decimal("123.04")
|
||||
assert mid.prices[0].amount == Decimal("124.4787")
|
||||
|
||||
|
||||
def test_fetch_stock_types_adj_available(src, search_ok, ibm_adj_ok):
|
||||
adj = src.fetch(Series("IBM", "", "adjclose", "2021-01-04", "2021-01-08"))
|
||||
assert adj.prices[0].amount == Decimal("120.943645029")
|
||||
|
||||
|
||||
def test_fetch_stock_type_mid_is_mean_of_low_and_high(src, search_ok, ibm_ok):
|
||||
hgh = src.fetch(Series("IBM", "", "high", "2021-01-04", "2021-01-08")).prices
|
||||
low = src.fetch(Series("IBM", "", "low", "2021-01-04", "2021-01-08")).prices
|
||||
mid = src.fetch(Series("IBM", "", "mid", "2021-01-04", "2021-01-08")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_stock_bad_sym(src, type, search_not_found, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
stock_url,
|
||||
status=200,
|
||||
body="""{
|
||||
"Error Message": "Invalid API call. Please retry or..."
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTASTOCK", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unknown stock symbol" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_quote_found_prices_error(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
stock_url,
|
||||
status=200,
|
||||
body="""{
|
||||
"Error Message": "Invalid API call. Please retry or..."
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "bad response" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_network_issue(src, type, search_ok, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, stock_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_bad_status(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_parsing_error(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_unexpected_json(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_rate_limit(src, type, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, stock_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("IBM", "", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_stock_premium(src, search_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, adj_stock_url, body=premium_json)
|
||||
with pytest.raises(exceptions.CredentialsError) as e:
|
||||
src.fetch(Series("IBM", "", "adjclose", "2021-01-04", "2021-01-08"))
|
||||
assert "denied access to a premium endpoint" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_known(src, type, physical_list_ok, euraud_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
req = euraud_ok.calls[1].request
|
||||
assert req.params["function"] == "FX_DAILY"
|
||||
assert req.params["from_symbol"] == "EUR"
|
||||
assert req.params["to_symbol"] == "AUD"
|
||||
assert req.params["outputsize"] == "full"
|
||||
assert (series.base, series.quote) == ("EUR", "AUD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("1.59718"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("1.57350"))
|
||||
|
||||
|
||||
def test_fetch_physical_compact_if_recent(src, type, physical_list_ok, euraud_ok):
|
||||
today = datetime.now().date()
|
||||
start = (today - timedelta(days=30)).isoformat()
|
||||
end = today.isoformat()
|
||||
src.fetch(Series("EUR", "AUD", type, start, end))
|
||||
req = euraud_ok.calls[1].request
|
||||
assert req.params["outputsize"] == "compact"
|
||||
|
||||
|
||||
def test_fetch_physical_requests_logged(src, type, physical_list_ok, euraud_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and "curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 2
|
||||
|
||||
|
||||
def test_fetch_physical_types_but_adjclose_available(src, physical_list_ok, euraud_ok):
|
||||
cls = src.fetch(Series("EUR", "AUD", "close", "2021-01-04", "2021-01-08"))
|
||||
opn = src.fetch(Series("EUR", "AUD", "open", "2021-01-04", "2021-01-08"))
|
||||
hgh = src.fetch(Series("EUR", "AUD", "high", "2021-01-04", "2021-01-08"))
|
||||
low = src.fetch(Series("EUR", "AUD", "low", "2021-01-04", "2021-01-08"))
|
||||
mid = src.fetch(Series("EUR", "AUD", "mid", "2021-01-04", "2021-01-08"))
|
||||
assert cls.prices[0].amount == Decimal("1.59718")
|
||||
assert opn.prices[0].amount == Decimal("1.58741")
|
||||
assert hgh.prices[0].amount == Decimal("1.60296")
|
||||
assert low.prices[0].amount == Decimal("1.58550")
|
||||
assert mid.prices[0].amount == Decimal("1.59423")
|
||||
|
||||
|
||||
def test_fetch_physical_adjclose_not_available(src):
    with pytest.raises(exceptions.InvalidType) as e:
        src.fetch(Series("EUR", "AUD", "adjclose", "2021-01-04", "2021-01-08"))
    assert "Invalid price type 'adjclose' for pair 'EUR/AUD'." in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_type_mid_is_mean_of_low_and_high(
|
||||
src, physical_list_ok, euraud_ok
|
||||
):
|
||||
hgh = src.fetch(Series("EUR", "AUD", "high", "2021-01-04", "2021-01-08")).prices
|
||||
low = src.fetch(Series("EUR", "AUD", "low", "2021-01-04", "2021-01-08")).prices
|
||||
mid = src.fetch(Series("EUR", "AUD", "mid", "2021-01-04", "2021-01-08")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_physical_bad_sym(src, type, physical_list_ok, digital_list_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTPHYSICAL", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "base must be a known physical or digital currency" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_network_issue(src, type, physical_list_ok, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, physical_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_bad_status(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_parsing_error(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_unexpected_json(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_physical_rate_limit(src, type, physical_list_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, physical_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_known(src, type, physical_list_ok, digital_list_ok, btcaud_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
req = btcaud_ok.calls[2].request
|
||||
assert req.params["function"] == "DIGITAL_CURRENCY_DAILY"
|
||||
assert req.params["symbol"] == "BTC"
|
||||
assert req.params["market"] == "AUD"
|
||||
assert (series.base, series.quote) == ("BTC", "AUD")
|
||||
assert len(series.prices) == 5
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("43406.76014740"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("55068.43820140"))
|
||||
|
||||
|
||||
def test_fetch_digital_requests_logged(
|
||||
src, type, physical_list_ok, digital_list_ok, btcaud_ok, caplog
|
||||
):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and "curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 3
|
||||
|
||||
|
||||
def test_fetch_digital_types_but_adjclose_available(
|
||||
src, physical_list_ok, digital_list_ok, btcaud_ok
|
||||
):
|
||||
cls = src.fetch(Series("BTC", "AUD", "close", "2021-01-04", "2021-01-08"))
|
||||
opn = src.fetch(Series("BTC", "AUD", "open", "2021-01-04", "2021-01-08"))
|
||||
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-04", "2021-01-08"))
|
||||
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-04", "2021-01-08"))
|
||||
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-04", "2021-01-08"))
|
||||
assert cls.prices[0].amount == Decimal("43406.76014740")
|
||||
assert opn.prices[0].amount == Decimal("44779.08784700")
|
||||
assert hgh.prices[0].amount == Decimal("45593.18400000")
|
||||
assert low.prices[0].amount == Decimal("38170.72220000")
|
||||
assert mid.prices[0].amount == Decimal("41881.95310000")
|
||||
|
||||
|
||||
def test_fetch_digital_adjclose_not_available(src):
|
||||
with pytest.raises(exceptions.InvalidType) as e:
|
||||
src.fetch(Series("BTC", "AUD", "adjclose", "2021-01-04", "2021-01-08"))
|
||||
assert "Invalid price type 'adjclose' for pair 'BTC/AUD'." in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_type_mid_is_mean_of_low_and_high(
|
||||
src, physical_list_ok, digital_list_ok, btcaud_ok
|
||||
):
|
||||
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-04", "2021-01-08")).prices
|
||||
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-04", "2021-01-08")).prices
|
||||
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-04", "2021-01-08")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 5)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_digital_bad_sym(src, type, physical_list_ok, digital_list_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTDIGITAL", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "base must be a known physical or digital currency" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_network_issue(
|
||||
src, type, physical_list_ok, digital_list_ok, requests_mock
|
||||
):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, digital_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_bad_status(
|
||||
src, type, physical_list_ok, digital_list_ok, requests_mock
|
||||
):
|
||||
requests_mock.add(responses.GET, digital_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_parsing_error(
|
||||
src, type, physical_list_ok, digital_list_ok, requests_mock
|
||||
):
|
||||
requests_mock.add(responses.GET, digital_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_unexpected_json(
|
||||
src, type, physical_list_ok, digital_list_ok, requests_mock
|
||||
):
|
||||
requests_mock.add(responses.GET, digital_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_digital_rate_limit(
|
||||
src, type, physical_list_ok, digital_list_ok, requests_mock
|
||||
):
|
||||
requests_mock.add(responses.GET, digital_url, body=rate_limit_json)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "rate limit" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_pair_quote_non_physical(src, type, physical_list_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("EUR", "BTC", type, "2021-01-04", "2021-01-08"))
|
||||
assert "quote must be a physical currency" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_api_key_defaults_to_generic(
|
||||
src, type, physical_list_ok, euraud_ok, monkeypatch
|
||||
):
|
||||
monkeypatch.delenv(api_key_name)
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
req = euraud_ok.calls[-1].request
|
||||
assert req.params["apikey"] == f"pricehist_{__version__}"
|
||||
|
||||
|
||||
def test_fetch_api_key_invalid(src, type, physical_list_ok, requests_mock):
|
||||
body = (
|
||||
'{ "Error Message": "the parameter apikey is invalid or missing. Please '
|
||||
"claim your free API key on (https://www.alphavantage.co/support/#api-key). "
|
||||
'It should take less than 20 seconds." }'
|
||||
)
|
||||
requests_mock.add(responses.GET, physical_url, body=body)
|
||||
with pytest.raises(exceptions.CredentialsError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "unavailable or invalid" in str(e.value)
|
@ -0,0 +1,62 @@
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Daily Prices and Volumes for Digital Currency",
|
||||
"2. Digital Currency Code": "BTC",
|
||||
"3. Digital Currency Name": "Bitcoin",
|
||||
"4. Market Code": "AUD",
|
||||
"5. Market Name": "Australian Dollar",
|
||||
"6. Last Refreshed": "2021-07-28 00:00:00",
|
||||
"7. Time Zone": "UTC"
|
||||
},
|
||||
"Time Series (Digital Currency Daily)": {
|
||||
"2021-01-09": {
|
||||
"1. open": "55074.06950240",
|
||||
"2. high": "56150.17720000",
|
||||
"3. low": "52540.71680000",
|
||||
"4. close": "54397.30924680",
|
||||
"5. volume": "75785.97967500"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "53507.50941120",
|
||||
"2. high": "56923.63300000",
|
||||
"3. low": "49528.31000000",
|
||||
"4. close": "55068.43820140",
|
||||
"5. volume": "139789.95749900"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "49893.81535840",
|
||||
"2. high": "54772.88310000",
|
||||
"3. low": "49256.92200000",
|
||||
"4. close": "53507.23802320",
|
||||
"5. volume": "132825.70043700"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "46067.47523820",
|
||||
"2. high": "50124.29161740",
|
||||
"3. low": "45169.81872000",
|
||||
"4. close": "49893.81535840",
|
||||
"5. volume": "127139.20131000"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "43408.17136500",
|
||||
"2. high": "46624.45840000",
|
||||
"3. low": "40572.50600000",
|
||||
"4. close": "46067.47523820",
|
||||
"5. volume": "116049.99703800"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "44779.08784700",
|
||||
"2. high": "45593.18400000",
|
||||
"3. low": "38170.72220000",
|
||||
"4. close": "43406.76014740",
|
||||
"5. volume": "140899.88569000"
|
||||
},
|
||||
"2021-01-03": {
|
||||
"1. open": "43661.51206300",
|
||||
"2. high": "47191.80858340",
|
||||
"3. low": "43371.85965060",
|
||||
"4. close": "44779.08784700",
|
||||
"5. volume": "120957.56675000"
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,3 @@
currency code,currency name
BTC,Bitcoin
ETH,Ethereum
@ -0,0 +1,60 @@
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Forex Daily Prices (open, high, low, close)",
|
||||
"2. From Symbol": "EUR",
|
||||
"3. To Symbol": "AUD",
|
||||
"4. Output Size": "Full size",
|
||||
"5. Last Refreshed": "2021-07-27 11:35:00",
|
||||
"6. Time Zone": "UTC"
|
||||
},
|
||||
"Time Series FX (Daily)": {
|
||||
"2021-01-11": {
|
||||
"1. open": "1.57496",
|
||||
"2. high": "1.58318",
|
||||
"3. low": "1.57290",
|
||||
"4. close": "1.57823"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "1.57879",
|
||||
"2. high": "1.58140",
|
||||
"3. low": "1.57177",
|
||||
"4. close": "1.57350"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "1.57901",
|
||||
"2. high": "1.58650",
|
||||
"3. low": "1.57757",
|
||||
"4. close": "1.57893"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "1.58390",
|
||||
"2. high": "1.58800",
|
||||
"3. low": "1.57640",
|
||||
"4. close": "1.57932"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "1.59698",
|
||||
"2. high": "1.59886",
|
||||
"3. low": "1.58100",
|
||||
"4. close": "1.58389"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "1.58741",
|
||||
"2. high": "1.60296",
|
||||
"3. low": "1.58550",
|
||||
"4. close": "1.59718"
|
||||
},
|
||||
"2021-01-01": {
|
||||
"1. open": "1.58730",
|
||||
"2. high": "1.58730",
|
||||
"3. low": "1.58504",
|
||||
"4. close": "1.58668"
|
||||
},
|
||||
"2020-12-31": {
|
||||
"1. open": "1.59946",
|
||||
"2. high": "1.60138",
|
||||
"3. low": "1.58230",
|
||||
"4. close": "1.58730"
|
||||
}
|
||||
}
|
||||
}
@ -0,0 +1,81 @@
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Daily Time Series with Splits and Dividend Events",
|
||||
"2. Symbol": "IBM",
|
||||
"3. Last Refreshed": "2021-07-20",
|
||||
"4. Output Size": "Full size",
|
||||
"5. Time Zone": "US/Eastern"
|
||||
},
|
||||
"Time Series (Daily)": {
|
||||
"2021-01-11": {
|
||||
"1. open": "127.95",
|
||||
"2. high": "129.675",
|
||||
"3. low": "127.66",
|
||||
"4. close": "128.58",
|
||||
"5. adjusted close": "125.471469081",
|
||||
"6. volume": "5602466",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "128.57",
|
||||
"2. high": "129.32",
|
||||
"3. low": "126.98",
|
||||
"4. close": "128.53",
|
||||
"5. adjusted close": "125.422677873",
|
||||
"6. volume": "4676487",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "130.04",
|
||||
"2. high": "130.46",
|
||||
"3. low": "128.26",
|
||||
"4. close": "128.99",
|
||||
"5. adjusted close": "125.871556982",
|
||||
"6. volume": "4507382",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "126.9",
|
||||
"2. high": "131.88",
|
||||
"3. low": "126.72",
|
||||
"4. close": "129.29",
|
||||
"5. adjusted close": "126.164304226",
|
||||
"6. volume": "7956740",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "125.01",
|
||||
"2. high": "126.68",
|
||||
"3. low": "124.61",
|
||||
"4. close": "126.14",
|
||||
"5. adjusted close": "123.090458157",
|
||||
"6. volume": "6114619",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "125.85",
|
||||
"2. high": "125.9174",
|
||||
"3. low": "123.04",
|
||||
"4. close": "123.94",
|
||||
"5. adjusted close": "120.943645029",
|
||||
"6. volume": "5179161",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
},
|
||||
"2020-12-31": {
|
||||
"1. open": "124.22",
|
||||
"2. high": "126.03",
|
||||
"3. low": "123.99",
|
||||
"4. close": "125.88",
|
||||
"5. adjusted close": "122.836743878",
|
||||
"6. volume": "3574696",
|
||||
"7. dividend amount": "0.0000",
|
||||
"8. split coefficient": "1.0"
|
||||
}
|
||||
}
|
||||
}
|
53
tests/pricehist/sources/test_alphavantage/ibm-partial.json
Normal file
@ -0,0 +1,53 @@
{
|
||||
"Meta Data": {
|
||||
"1. Information": "Daily Time Series with Splits and Dividend Events",
|
||||
"2. Symbol": "IBM",
|
||||
"3. Last Refreshed": "2021-07-20",
|
||||
"4. Output Size": "Full size",
|
||||
"5. Time Zone": "US/Eastern"
|
||||
},
|
||||
"Time Series (Daily)": {
|
||||
"2021-01-11": {
|
||||
"1. open": "127.95",
|
||||
"2. high": "129.675",
|
||||
"3. low": "127.66",
|
||||
"4. close": "128.58"
|
||||
},
|
||||
"2021-01-08": {
|
||||
"1. open": "128.57",
|
||||
"2. high": "129.32",
|
||||
"3. low": "126.98",
|
||||
"4. close": "128.53"
|
||||
},
|
||||
"2021-01-07": {
|
||||
"1. open": "130.04",
|
||||
"2. high": "130.46",
|
||||
"3. low": "128.26",
|
||||
"4. close": "128.99"
|
||||
},
|
||||
"2021-01-06": {
|
||||
"1. open": "126.9",
|
||||
"2. high": "131.88",
|
||||
"3. low": "126.72",
|
||||
"4. close": "129.29"
|
||||
},
|
||||
"2021-01-05": {
|
||||
"1. open": "125.01",
|
||||
"2. high": "126.68",
|
||||
"3. low": "124.61",
|
||||
"4. close": "126.14"
|
||||
},
|
||||
"2021-01-04": {
|
||||
"1. open": "125.85",
|
||||
"2. high": "125.9174",
|
||||
"3. low": "123.04",
|
||||
"4. close": "123.94"
|
||||
},
|
||||
"2020-12-31": {
|
||||
"1. open": "124.22",
|
||||
"2. high": "126.03",
|
||||
"3. low": "123.99",
|
||||
"4. close": "125.88"
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,4 @@
currency code,currency name
AUD,Australian Dollar
EUR,Euro
USD,United States Dollar
114
tests/pricehist/sources/test_alphavantage/search-ibm.json
Normal file
@ -0,0 +1,114 @@
{
|
||||
"bestMatches": [
|
||||
{
|
||||
"1. symbol": "IBM",
|
||||
"2. name": "International Business Machines Corp",
|
||||
"3. type": "Equity",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "1.0000"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMJ",
|
||||
"2. name": "iShares iBonds Dec 2021 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMK",
|
||||
"2. name": "iShares iBonds Dec 2022 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBML",
|
||||
"2. name": "iShares iBonds Dec 2023 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMM",
|
||||
"2. name": "iShares iBonds Dec 2024 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMN",
|
||||
"2. name": "iShares iBonds Dec 2025 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBMO",
|
||||
"2. name": "iShares iBonds Dec 2026 Term Muni Bond ETF",
|
||||
"3. type": "ETF",
|
||||
"4. region": "United States",
|
||||
"5. marketOpen": "09:30",
|
||||
"6. marketClose": "16:00",
|
||||
"7. timezone": "UTC-04",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.8571"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.FRK",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "Frankfurt",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "20:00",
|
||||
"7. timezone": "UTC+02",
|
||||
"8. currency": "EUR",
|
||||
"9. matchScore": "0.7500"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.LON",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "United Kingdom",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "16:30",
|
||||
"7. timezone": "UTC+01",
|
||||
"8. currency": "USD",
|
||||
"9. matchScore": "0.7500"
|
||||
},
|
||||
{
|
||||
"1. symbol": "IBM.DEX",
|
||||
"2. name": "International Business Machines Corporation",
|
||||
"3. type": "Equity",
|
||||
"4. region": "XETRA",
|
||||
"5. marketOpen": "08:00",
|
||||
"6. marketClose": "20:00",
|
||||
"7. timezone": "UTC+02",
|
||||
"8. currency": "EUR",
|
||||
"9. matchScore": "0.6667"
|
||||
}
|
||||
]
|
||||
}
|
246
tests/pricehist/sources/test_bankofcanada.py
Normal file
@ -0,0 +1,246 @@
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.bankofcanada import BankOfCanada
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return BankOfCanada()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series_list_url():
|
||||
return "https://www.bankofcanada.ca/valet/lists/series/json"
|
||||
|
||||
|
||||
def fetch_url(series_name):
|
||||
return f"https://www.bankofcanada.ca/valet/observations/{series_name}/json"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series_list_json():
|
||||
dir = Path(os.path.splitext(__file__)[0])
|
||||
return (dir / "series-partial.json").read_text()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def series_list_response_ok(requests_mock, series_list_url, series_list_json):
|
||||
requests_mock.add(responses.GET, series_list_url, body=series_list_json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def recent_response_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "recent.json").read_text()
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def all_response_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "all-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("cad") == "CAD"
|
||||
assert src.normalizesymbol("usd") == "USD"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols(src, series_list_response_ok):
|
||||
syms = src.symbols()
|
||||
assert ("CAD/USD", "Canadian dollar to US dollar daily exchange rate") in syms
|
||||
assert len(syms) > 3
|
||||
|
||||
|
||||
def test_symbols_requests_logged(src, series_list_response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.symbols()
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_symbols_not_found(src, requests_mock, series_list_url):
|
||||
requests_mock.add(responses.GET, series_list_url, body='{"series":{}}', status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "data not found" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_network_issue(src, requests_mock, series_list_url):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
series_list_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_bad_status(src, requests_mock, series_list_url):
|
||||
requests_mock.add(responses.GET, series_list_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_parsing_error(src, requests_mock, series_list_url):
|
||||
requests_mock.add(responses.GET, series_list_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_known_pair(src, type, recent_response_ok):
|
||||
series = src.fetch(Series("CAD", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_response_ok.calls[0].request
|
||||
assert req.params["order_dir"] == "asc"
|
||||
assert req.params["start_date"] == "2021-01-01"
|
||||
assert req.params["end_date"] == "2021-01-07"
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("0.7843"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("0.7870"))
|
||||
assert len(series.prices) == 4
|
||||
|
||||
|
||||
def test_fetch_requests_logged(src, type, recent_response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("CAD", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_long_hist_from_start(src, type, all_response_ok):
|
||||
series = src.fetch(Series("CAD", "USD", type, src.start(), "2021-01-07"))
|
||||
assert series.prices[0] == Price("2017-01-03", Decimal("0.7443"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("0.7870"))
|
||||
assert len(series.prices) > 13
|
||||
|
||||
|
||||
def test_fetch_from_before_start(src, type, requests_mock):
|
||||
body = """{ "observations": [] }"""
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), status=200, body=body)
|
||||
series = src.fetch(Series("CAD", "USD", type, "2000-01-01", "2017-01-01"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_to_future(src, type, all_response_ok):
|
||||
series = src.fetch(Series("CAD", "USD", type, "2021-01-01", "2100-01-01"))
|
||||
assert len(series.prices) > 0
|
||||
|
||||
|
||||
def test_wrong_dates_order(src, type, requests_mock):
|
||||
body = """{ "message": "The End date must be greater than the Start date." }"""
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), status=400, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("CAD", "USD", type, "2021-01-07", "2021-01-01"))
|
||||
assert "End date must be greater" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_in_future(src, type, requests_mock):
|
||||
body = """{ "observations": [] }"""
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), status=200, body=body)
|
||||
series = src.fetch(Series("CAD", "USD", type, "2030-01-01", "2030-01-07"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_empty(src, type, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET, fetch_url("FXCADUSD"), body="""{"observations":{}}"""
|
||||
)
|
||||
series = src.fetch(Series("CAD", "USD", type, "2021-01-03", "2021-01-03"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_no_quote(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("CAD", "", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_unknown_pair(src, type, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
fetch_url("FXCADAFN"),
|
||||
status=404,
|
||||
body="""{
|
||||
"message": "Series FXCADAFN not found.",
|
||||
"docs": "https://www.bankofcanada.ca/valet/docs"
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("CAD", "AFN", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_network_issue(src, type, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("CAD", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_status(src, type, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
fetch_url("FXCADUSD"),
|
||||
status=500,
|
||||
body="""{"message": "Some other reason"}""",
|
||||
)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("CAD", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_parsing_error(src, type, requests_mock):
|
||||
requests_mock.add(responses.GET, fetch_url("FXCADUSD"), body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("CAD", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "while parsing data" in str(e.value)
|
101
tests/pricehist/sources/test_bankofcanada/all-partial.json
Normal file
@ -0,0 +1,101 @@
{
|
||||
"terms": {
|
||||
"url": "https://www.bankofcanada.ca/terms/"
|
||||
},
|
||||
"seriesDetail": {
|
||||
"FXCADUSD": {
|
||||
"label": "CAD/USD",
|
||||
"description": "Canadian dollar to US dollar daily exchange rate",
|
||||
"dimension": {
|
||||
"key": "d",
|
||||
"name": "date"
|
||||
}
|
||||
}
|
||||
},
|
||||
"observations": [
|
||||
{
|
||||
"d": "2017-01-03",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7443"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-04",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7510"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-05",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7551"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-06",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7568"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-09",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7553"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-10",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7568"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2017-01-11",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7547"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2020-12-29",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7809"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2020-12-30",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7831"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2020-12-31",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7854"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2021-01-04",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7843"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2021-01-05",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7870"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2021-01-06",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7883"
|
||||
}
|
||||
},
|
||||
{
|
||||
"d": "2021-01-07",
|
||||
"FXCADUSD": {
|
||||
"v": "0.7870"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
41
tests/pricehist/sources/test_bankofcanada/recent.json
Normal file
@ -0,0 +1,41 @@
{
  "terms": {
    "url": "https://www.bankofcanada.ca/terms/"
  },
  "seriesDetail": {
    "FXCADUSD": {
      "label": "CAD/USD",
      "description": "Canadian dollar to US dollar daily exchange rate",
      "dimension": {
        "key": "d",
        "name": "date"
      }
    }
  },
  "observations": [
    {
      "d": "2021-01-04",
      "FXCADUSD": {
        "v": "0.7843"
      }
    },
    {
      "d": "2021-01-05",
      "FXCADUSD": {
        "v": "0.7870"
      }
    },
    {
      "d": "2021-01-06",
      "FXCADUSD": {
        "v": "0.7883"
      }
    },
    {
      "d": "2021-01-07",
      "FXCADUSD": {
        "v": "0.7870"
      }
    }
  ]
}
272
tests/pricehist/sources/test_bankofcanada/series-partial.json
Normal file
@ -0,0 +1,272 @@
{
|
||||
"terms": {
|
||||
"url": "https://www.bankofcanada.ca/terms/"
|
||||
},
|
||||
"series": {
|
||||
"FXAUDCAD": {
|
||||
"label": "AUD/CAD",
|
||||
"description": "Australian dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXAUDCAD"
|
||||
},
|
||||
"FXBRLCAD": {
|
||||
"label": "BRL/CAD",
|
||||
"description": "Brazilian real to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXBRLCAD"
|
||||
},
|
||||
"FXCNYCAD": {
|
||||
"label": "CNY/CAD",
|
||||
"description": "Chinese renminbi to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCNYCAD"
|
||||
},
|
||||
"FXEURCAD": {
|
||||
"label": "EUR/CAD",
|
||||
"description": "European euro to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXEURCAD"
|
||||
},
|
||||
"FXHKDCAD": {
|
||||
"label": "HKD/CAD",
|
||||
"description": "Hong Kong dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXHKDCAD"
|
||||
},
|
||||
"FXINRCAD": {
|
||||
"label": "INR/CAD",
|
||||
"description": "Indian rupee to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXINRCAD"
|
||||
},
|
||||
"FXIDRCAD": {
|
||||
"label": "IDR/CAD",
|
||||
"description": "Indonesian rupiah to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXIDRCAD"
|
||||
},
|
||||
"FXJPYCAD": {
|
||||
"label": "JPY/CAD",
|
||||
"description": "Japanese yen to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXJPYCAD"
|
||||
},
|
||||
"FXMYRCAD": {
|
||||
"label": "MYR/CAD",
|
||||
"description": "Malaysian ringgit to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXMYRCAD"
|
||||
},
|
||||
"FXMXNCAD": {
|
||||
"label": "MXN/CAD",
|
||||
"description": "Mexican peso to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXMXNCAD"
|
||||
},
|
||||
"FXNZDCAD": {
|
||||
"label": "NZD/CAD",
|
||||
"description": "New Zealand dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXNZDCAD"
|
||||
},
|
||||
"FXNOKCAD": {
|
||||
"label": "NOK/CAD",
|
||||
"description": "Norwegian krone to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXNOKCAD"
|
||||
},
|
||||
"FXPENCAD": {
|
||||
"label": "PEN/CAD",
|
||||
"description": "Peruvian new sol to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXPENCAD"
|
||||
},
|
||||
"FXRUBCAD": {
|
||||
"label": "RUB/CAD",
|
||||
"description": "Russian ruble to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXRUBCAD"
|
||||
},
|
||||
"FXSARCAD": {
|
||||
"label": "SAR/CAD",
|
||||
"description": "Saudi riyal to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXSARCAD"
|
||||
},
|
||||
"FXSGDCAD": {
|
||||
"label": "SGD/CAD",
|
||||
"description": "Singapore dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXSGDCAD"
|
||||
},
|
||||
"FXZARCAD": {
|
||||
"label": "ZAR/CAD",
|
||||
"description": "South African rand to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXZARCAD"
|
||||
},
|
||||
"FXKRWCAD": {
|
||||
"label": "KRW/CAD",
|
||||
"description": "South Korean won to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXKRWCAD"
|
||||
},
|
||||
"FXSEKCAD": {
|
||||
"label": "SEK/CAD",
|
||||
"description": "Swedish krona to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXSEKCAD"
|
||||
},
|
||||
"FXCHFCAD": {
|
||||
"label": "CHF/CAD",
|
||||
"description": "Swiss franc to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCHFCAD"
|
||||
},
|
||||
"FXTWDCAD": {
|
||||
"label": "TWD/CAD",
|
||||
"description": "Taiwanese dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXTWDCAD"
|
||||
},
|
||||
"FXTHBCAD": {
|
||||
"label": "THB/CAD",
|
||||
"description": "Thai baht to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXTHBCAD"
|
||||
},
|
||||
"FXTRYCAD": {
|
||||
"label": "TRY/CAD",
|
||||
"description": "Turkish lira to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXTRYCAD"
|
||||
},
|
||||
"FXGBPCAD": {
|
||||
"label": "GBP/CAD",
|
||||
"description": "UK pound sterling to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXGBPCAD"
|
||||
},
|
||||
"FXUSDCAD": {
|
||||
"label": "USD/CAD",
|
||||
"description": "US dollar to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXUSDCAD"
|
||||
},
|
||||
"FXVNDCAD": {
|
||||
"label": "VND/CAD",
|
||||
"description": "Vietnamese dong to Canadian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXVNDCAD"
|
||||
},
|
||||
"FXCADAUD": {
|
||||
"label": "CAD/AUD",
|
||||
"description": "Canadian dollar to Australian dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADAUD"
|
||||
},
|
||||
"FXCADBRL": {
|
||||
"label": "CAD/BRL",
|
||||
"description": "Canadian dollar to Brazilian real daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADBRL"
|
||||
},
|
||||
"FXCADCNY": {
|
||||
"label": "CAD/CNY",
|
||||
"description": "Canadian dollar to Chinese renminbi daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADCNY"
|
||||
},
|
||||
"FXCADEUR": {
|
||||
"label": "CAD/EUR",
|
||||
"description": "Canadian dollar to European euro daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADEUR"
|
||||
},
|
||||
"FXCADHKD": {
|
||||
"label": "CAD/HKD",
|
||||
"description": "Canadian dollar to Hong Kong dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADHKD"
|
||||
},
|
||||
"FXCADINR": {
|
||||
"label": "CAD/INR",
|
||||
"description": "Canadian dollar to Indian rupee daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADINR"
|
||||
},
|
||||
"FXCADIDR": {
|
||||
"label": "CAD/IDR",
|
||||
"description": "Canadian dollar to Indonesian rupiah daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADIDR"
|
||||
},
|
||||
"FXCADJPY": {
|
||||
"label": "CAD/JPY",
|
||||
"description": "Canadian dollar to Japanese yen daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADJPY"
|
||||
},
|
||||
"FXCADMYR": {
|
||||
"label": "CAD/MYR",
|
||||
"description": "Canadian dollar to Malaysian ringgit daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADMYR"
|
||||
},
|
||||
"FXCADMXN": {
|
||||
"label": "CAD/MXN",
|
||||
"description": "Canadian dollar to Mexican peso daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADMXN"
|
||||
},
|
||||
"FXCADNZD": {
|
||||
"label": "CAD/NZD",
|
||||
"description": "Canadian dollar to New Zealand dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADNZD"
|
||||
},
|
||||
"FXCADNOK": {
|
||||
"label": "CAD/NOK",
|
||||
"description": "Canadian dollar to Norwegian krone daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADNOK"
|
||||
},
|
||||
"FXCADPEN": {
|
||||
"label": "CAD/PEN",
|
||||
"description": "Canadian dollar to Peruvian new sol daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADPEN"
|
||||
},
|
||||
"FXCADRUB": {
|
||||
"label": "CAD/RUB",
|
||||
"description": "Canadian dollar to Russian ruble daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADRUB"
|
||||
},
|
||||
"FXCADSAR": {
|
||||
"label": "CAD/SAR",
|
||||
"description": "Canadian dollar to Saudi riyal daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADSAR"
|
||||
},
|
||||
"FXCADSGD": {
|
||||
"label": "CAD/SGD",
|
||||
"description": "Canadian dollar to Singapore dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADSGD"
|
||||
},
|
||||
"FXCADZAR": {
|
||||
"label": "CAD/ZAR",
|
||||
"description": "Canadian dollar to South African rand daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADZAR"
|
||||
},
|
||||
"FXCADKRW": {
|
||||
"label": "CAD/KRW",
|
||||
"description": "Canadian dollar to South Korean won daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADKRW"
|
||||
},
|
||||
"FXCADSEK": {
|
||||
"label": "CAD/SEK",
|
||||
"description": "Canadian dollar to Swedish krona daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADSEK"
|
||||
},
|
||||
"FXCADCHF": {
|
||||
"label": "CAD/CHF",
|
||||
"description": "Canadian dollar to Swiss franc daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADCHF"
|
||||
},
|
||||
"FXCADTWD": {
|
||||
"label": "CAD/TWD",
|
||||
"description": "Canadian dollar to Taiwanese dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADTWD"
|
||||
},
|
||||
"FXCADTHB": {
|
||||
"label": "CAD/THB",
|
||||
"description": "Canadian dollar to Thai baht daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADTHB"
|
||||
},
|
||||
"FXCADTRY": {
|
||||
"label": "CAD/TRY",
|
||||
"description": "Canadian dollar to Turkish lira daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADTRY"
|
||||
},
|
||||
"FXCADGBP": {
|
||||
"label": "CAD/GBP",
|
||||
"description": "Canadian dollar to UK pound sterling daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADGBP"
|
||||
},
|
||||
"FXCADUSD": {
|
||||
"label": "CAD/USD",
|
||||
"description": "Canadian dollar to US dollar daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADUSD"
|
||||
},
|
||||
"FXCADVND": {
|
||||
"label": "CAD/VND",
|
||||
"description": "Canadian dollar to Vietnamese dong daily exchange rate",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/FXCADVND"
|
||||
},
|
||||
"INDINF_GRACE_Q": {
|
||||
"label": "Foreign demand for Canadian non-commodity exports (GRACE) (2007=100)",
|
||||
"description": "Foreign demand for Canadian non-commodity exports (GRACE) (2007=100)",
|
||||
"link": "https://www.bankofcanada.ca/valet/series/INDINF_GRACE_Q"
|
||||
}
|
||||
}
|
||||
}
|
183
tests/pricehist/sources/test_basesource.py
Normal file
@ -0,0 +1,183 @@
import logging
from typing import List, Tuple

import pytest

from pricehist.series import Series
from pricehist.sources.basesource import BaseSource


class TestSource(BaseSource):
    def id(self) -> str:
        return ""

    def name(self) -> str:
        return ""

    def description(self) -> str:
        return ""

    def source_url(self) -> str:
        return ""

    def start(self) -> str:
        return ""

    def types(self) -> List[str]:
        return []

    def notes(self) -> str:
        return ""

    def symbols(self) -> List[Tuple[str, str]]:
        return []

    def fetch(self, series: Series) -> Series:
        pass


@pytest.fixture
def src():
    return TestSource()


def test_normalizesymbol_default_uppercase(src):
|
||||
assert src.normalizesymbol("eur") == "EUR"
|
||||
|
||||
|
||||
def test_format_symbols_one(src, mocker):
|
||||
src.symbols = mocker.MagicMock(return_value=[("A", "Description")])
|
||||
assert src.format_symbols() == "A Description\n"
|
||||
|
||||
|
||||
def test_format_symbols_many(src, mocker):
|
||||
src.symbols = mocker.MagicMock(
|
||||
return_value=[
|
||||
("A", "Description"),
|
||||
("BB", "Description longer"),
|
||||
("CCC", "Description longer again"),
|
||||
("DDDD", f"Description {'very '*15}long"),
|
||||
]
|
||||
)
|
||||
assert src.format_symbols() == (
|
||||
"A Description\n"
|
||||
"BB Description longer\n"
|
||||
"CCC Description longer again\n"
|
||||
"DDDD Description very very very very very very very very "
|
||||
"very very very very very very very long\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_search(src, mocker):
|
||||
src.search = mocker.MagicMock(
|
||||
return_value=[
|
||||
("A", "Description"),
|
||||
("BB", "Description longer"),
|
||||
("CCC", "Description longer again"),
|
||||
("DDDD", f"Description {'very '*15}long"),
|
||||
]
|
||||
)
|
||||
assert src.format_search("some query") == (
|
||||
"A Description\n"
|
||||
"BB Description longer\n"
|
||||
"CCC Description longer again\n"
|
||||
"DDDD Description very very very very very very very very "
|
||||
"very very very very very very very long\n"
|
||||
)
|
||||
|
||||
|
||||
def test_format_search_not_possible(src, mocker, caplog):
|
||||
src.search = mocker.MagicMock(return_value=None)
|
||||
with caplog.at_level(logging.INFO):
|
||||
with pytest.raises(SystemExit) as e:
|
||||
src.format_search("some query")
|
||||
assert e.value.code == 1
|
||||
r = caplog.records[0]
|
||||
assert r.levelname == "ERROR"
|
||||
assert "Symbol search is not possible for" in r.message
|
||||
|
||||
|
||||
def test_format_search_no_results(src, mocker, caplog):
|
||||
src.search = mocker.MagicMock(return_value=[])
|
||||
with caplog.at_level(logging.INFO):
|
||||
results = src.format_search("some query")
|
||||
r = caplog.records[0]
|
||||
assert r.levelname == "INFO"
|
||||
assert "No results found" in r.message
|
||||
assert results == ""
|
||||
|
||||
|
||||
def test_format_info_renders_all_fields(src, mocker):
|
||||
src.id = mocker.MagicMock(return_value="sourceid")
|
||||
src.name = mocker.MagicMock(return_value="Source Name")
|
||||
src.description = mocker.MagicMock(return_value="Source description.")
|
||||
src.source_url = mocker.MagicMock(return_value="https://example.com/")
|
||||
src.start = mocker.MagicMock(return_value="2021-01-01")
|
||||
src.types = mocker.MagicMock(return_value=["open", "close"])
|
||||
src.notes = mocker.MagicMock(return_value="Notes for user.")
|
||||
output = src.format_info()
|
||||
assert output == (
|
||||
"ID : sourceid\n"
|
||||
"Name : Source Name\n"
|
||||
"Description : Source description.\n"
|
||||
"URL : https://example.com/\n"
|
||||
"Start : 2021-01-01\n"
|
||||
"Types : open, close\n"
|
||||
"Notes : Notes for user."
|
||||
)
|
||||
|
||||
|
||||
def test_format_info_skips_empty_fields(src, mocker):
|
||||
src.notes = mocker.MagicMock(return_value="")
|
||||
output = src.format_info()
|
||||
assert "Notes" not in output
|
||||
|
||||
|
||||
def test_format_info_wraps_long_values_with_indent(src, mocker):
|
||||
notes = (
|
||||
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do "
|
||||
"eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim "
|
||||
"ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut "
|
||||
"aliquip ex ea commodo consequat."
|
||||
)
|
||||
src.notes = mocker.MagicMock(return_value=notes)
|
||||
output = src.format_info(total_width=60)
|
||||
assert output == (
|
||||
"Notes : Lorem ipsum dolor sit amet, consectetur\n"
|
||||
" adipiscing elit, sed do eiusmod tempor\n"
|
||||
" incididunt ut labore et dolore magna aliqua.\n"
|
||||
" Ut enim ad minim veniam, quis nostrud\n"
|
||||
" exercitation ullamco laboris nisi ut aliquip\n"
|
||||
" ex ea commodo consequat."
|
||||
)
|
||||
|
||||
|
||||
def test_format_info_newline_handling(src, mocker):
|
||||
notes = (
|
||||
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do "
|
||||
"eiusmod tempor incididunt ut labore.\n"
|
||||
"Ut enim ad minim veniam.\n"
|
||||
"\n"
|
||||
"Quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea "
|
||||
"commodo consequat."
|
||||
)
|
||||
src.notes = mocker.MagicMock(return_value=notes)
|
||||
output = src.format_info(total_width=60)
|
||||
assert output == (
|
||||
"Notes : Lorem ipsum dolor sit amet, consectetur\n"
|
||||
" adipiscing elit, sed do eiusmod tempor\n"
|
||||
" incididunt ut labore.\n"
|
||||
" Ut enim ad minim veniam.\n"
|
||||
"\n"
|
||||
" Quis nostrud exercitation ullamco laboris nisi\n"
|
||||
" ut aliquip ex ea commodo consequat."
|
||||
)
|
||||
|
||||
|
||||
def test_format_info_does_not_wrap_source_url(src, mocker):
|
||||
url = "https://www.example.com/longlonglonglonglonglonglonglong/"
|
||||
src.source_url = mocker.MagicMock(return_value=url)
|
||||
output = src.format_info(total_width=60)
|
||||
assert output == (
|
||||
"URL : https://www.example.com/longlonglonglonglonglonglonglong/"
|
||||
)
|
334
tests/pricehist/sources/test_coinbasepro.py
Normal file
@ -0,0 +1,334 @@
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.coinbasepro import CoinbasePro
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return CoinbasePro()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def products_url():
|
||||
return "https://api.pro.coinbase.com/products"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_url():
|
||||
return "https://api.pro.coinbase.com/currencies"
|
||||
|
||||
|
||||
def product_url(base, quote):
|
||||
return f"https://api.pro.coinbase.com/products/{base}-{quote}/candles"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def products_json():
|
||||
return (Path(os.path.splitext(__file__)[0]) / "products-partial.json").read_text()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_json():
|
||||
return (Path(os.path.splitext(__file__)[0]) / "currencies-partial.json").read_text()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def products_response_ok(requests_mock, products_url, products_json):
|
||||
requests_mock.add(responses.GET, products_url, body=products_json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_response_ok(requests_mock, currencies_url, currencies_json):
|
||||
requests_mock.add(responses.GET, currencies_url, body=currencies_json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def recent_response_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "recent.json").read_text()
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def multi_response_ok(requests_mock):
|
||||
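    # Serves a separate mocked response for each date-range segment so that a
    # long request exercises the source's multi-request (paginated) fetching.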
url1 = re.compile(
|
||||
r"https://api\.pro\.coinbase\.com/products/BTC-EUR/candles\?start=2020-01-01.*"
|
||||
)
|
||||
url2 = re.compile(
|
||||
r"https://api\.pro\.coinbase\.com/products/BTC-EUR/candles\?start=2020-10-17.*"
|
||||
)
|
||||
json1 = (
|
||||
Path(os.path.splitext(__file__)[0]) / "2020-01-01--2020-10-16.json"
|
||||
).read_text()
|
||||
json2 = (
|
||||
Path(os.path.splitext(__file__)[0]) / "2020-10-17--2021-01-07.json"
|
||||
).read_text()
|
||||
requests_mock.add(responses.GET, url1, body=json1, status=200)
|
||||
requests_mock.add(responses.GET, url2, body=json2, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def response_empty(requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
product_url("BTC", "EUR"),
|
||||
status=200,
|
||||
body="[]",
|
||||
)
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("btc") == "BTC"
|
||||
assert src.normalizesymbol("usd") == "USD"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols(src, products_response_ok, currencies_response_ok):
|
||||
syms = src.symbols()
|
||||
assert ("BTC/EUR", "Bitcoin against Euro") in syms
|
||||
assert len(syms) > 2
|
||||
|
||||
|
||||
def test_symbols_requests_logged(
|
||||
src, products_response_ok, currencies_response_ok, caplog
|
||||
):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.symbols()
|
||||
matching = filter(
|
||||
lambda r: "DEBUG" == r.levelname and "curl " in r.message,
|
||||
caplog.records,
|
||||
)
|
||||
assert len(list(matching)) == 2
|
||||
|
||||
|
||||
def test_symbols_not_found(src, requests_mock, products_url, currencies_response_ok):
|
||||
requests_mock.add(responses.GET, products_url, body="[]", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "data not found" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_network_issue(
|
||||
src, requests_mock, products_response_ok, currencies_url
|
||||
):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
currencies_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_bad_status(src, requests_mock, products_url, currencies_response_ok):
|
||||
requests_mock.add(responses.GET, products_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_parsing_error(
|
||||
src, requests_mock, products_response_ok, currencies_url
|
||||
):
|
||||
requests_mock.add(responses.GET, currencies_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_known_pair(src, type, recent_response_ok):
|
||||
series = src.fetch(Series("BTC", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_response_ok.calls[0].request
|
||||
assert req.params["granularity"] == "86400"
|
||||
assert req.params["start"] == "2021-01-01"
|
||||
assert req.params["end"] == "2021-01-07"
|
||||
assert series.prices[0] == Price("2021-01-01", Decimal("23881.35"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("31208.49"))
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_types_all_available(src, recent_response_ok):
|
||||
mid = src.fetch(Series("BTC", "EUR", "mid", "2021-01-01", "2021-01-07"))
|
||||
opn = src.fetch(Series("BTC", "EUR", "open", "2021-01-01", "2021-01-07"))
|
||||
hgh = src.fetch(Series("BTC", "EUR", "high", "2021-01-01", "2021-01-07"))
|
||||
low = src.fetch(Series("BTC", "EUR", "low", "2021-01-01", "2021-01-07"))
|
||||
cls = src.fetch(Series("BTC", "EUR", "close", "2021-01-01", "2021-01-07"))
|
||||
assert mid.prices[0].amount == Decimal("23881.35")
|
||||
assert opn.prices[0].amount == Decimal("23706.73")
|
||||
assert hgh.prices[0].amount == Decimal("24250")
|
||||
assert low.prices[0].amount == Decimal("23512.7")
|
||||
assert cls.prices[0].amount == Decimal("24070.97")
|
||||
|
||||
|
||||
def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_response_ok):
|
||||
mid = src.fetch(Series("BTC", "EUR", "mid", "2021-01-01", "2021-01-07")).prices
|
||||
low = src.fetch(Series("BTC", "EUR", "low", "2021-01-01", "2021-01-07")).prices
|
||||
hgh = src.fetch(Series("BTC", "EUR", "high", "2021-01-01", "2021-01-07")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 7)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_requests_logged(src, type, recent_response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_long_hist_multi_segment(src, type, multi_response_ok):
|
||||
series = src.fetch(Series("BTC", "EUR", type, "2020-01-01", "2021-01-07"))
|
||||
assert series.prices[0] == Price("2020-01-01", Decimal("6430.175"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("31208.49"))
|
||||
assert len(series.prices) > 3
|
||||
|
||||
|
||||
def test_fetch_from_before_start(src, type, requests_mock):
|
||||
body = '{"message":"End is too old"}'
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), status=400, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "1960-01-01", "1960-01-07"))
|
||||
assert "too early" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_in_future(src, type, response_empty):
|
||||
series = src.fetch(Series("BTC", "EUR", type, "2100-01-01", "2100-01-07"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_wrong_dates_order_alleged(src, type, requests_mock):
|
||||
    # This case is normally prevented by argument parsing and by checks inside the source.
|
||||
body = '{"message":"start must be before end"}'
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), status=400, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-07", "2021-01-01"))
|
||||
assert "end can't preceed" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_too_many_data_points_alleged(src, type, requests_mock):
|
||||
    # Should only happen if the limit is reduced or the calculated segments are lengthened.
|
||||
body = "aggregations requested exceeds"
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), status=400, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-07", "2021-01-01"))
|
||||
assert "Too many data points" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_rate_limit(src, type, requests_mock):
|
||||
body = "Too many requests"
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), status=429, body=body)
|
||||
with pytest.raises(exceptions.RateLimit) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-07", "2021-01-01"))
|
||||
assert "rate limit has been exceeded" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_empty(src, type, response_empty):
|
||||
series = src.fetch(Series("BTC", "EUR", type, "2000-01-01", "2000-01-07"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_unknown_base(src, type, requests_mock):
|
||||
body = '{"message":"NotFound"}'
|
||||
requests_mock.add(
|
||||
responses.GET, product_url("UNKNOWN", "EUR"), status=404, body=body
|
||||
)
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("UNKNOWN", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_unknown_quote(src, type, requests_mock):
|
||||
body = '{"message":"NotFound"}'
|
||||
requests_mock.add(responses.GET, product_url("BTC", "XZY"), status=404, body=body)
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("BTC", "XZY", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_no_quote(src, type, requests_mock):
|
||||
body = '{"message":"NotFound"}'
|
||||
requests_mock.add(responses.GET, product_url("BTC", ""), status=404, body=body)
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("BTC", "", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_unknown_pair(src, type, requests_mock):
|
||||
body = '{"message":"NotFound"}'
|
||||
requests_mock.add(responses.GET, product_url("ABC", "XZY"), status=404, body=body)
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("ABC", "XZY", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_network_issue(src, type, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_status(src, type, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET, product_url("BTC", "EUR"), status=500, body="Some other reason"
|
||||
)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_parsing_error(src, type, requests_mock):
|
||||
requests_mock.add(responses.GET, product_url("BTC", "EUR"), body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "EUR", type, "2021-01-01", "2021-01-07"))
|
||||
assert "while parsing data" in str(e.value)
|
|
@ -0,0 +1,18 @@
|
|||
[
|
||||
[
|
||||
1602806400,
|
||||
9588,
|
||||
9860,
|
||||
9828.84,
|
||||
9672.41,
|
||||
1068.08144123
|
||||
],
|
||||
[
|
||||
1577836800,
|
||||
6388.91,
|
||||
6471.44,
|
||||
6400.02,
|
||||
6410.22,
|
||||
491.94797816
|
||||
]
|
||||
]
|
|
@ -0,0 +1,18 @@
|
|||
[
|
||||
[
|
||||
1609977600,
|
||||
29516.98,
|
||||
32900,
|
||||
29818.73,
|
||||
32120.19,
|
||||
5957.46980324
|
||||
],
|
||||
[
|
||||
1602892800,
|
||||
9630.1,
|
||||
9742.61,
|
||||
9675.29,
|
||||
9706.33,
|
||||
385.03505036
|
||||
]
|
||||
]
|
141
tests/pricehist/sources/test_coinbasepro/currencies-partial.json
Normal file
|
@ -0,0 +1,141 @@
|
|||
[
|
||||
{
|
||||
"id": "BTC",
|
||||
"name": "Bitcoin",
|
||||
"min_size": "0.00000001",
|
||||
"status": "online",
|
||||
"message": "",
|
||||
"max_precision": "0.00000001",
|
||||
"convertible_to": [],
|
||||
"details": {
|
||||
"type": "crypto",
|
||||
"symbol": "₿",
|
||||
"network_confirmations": 3,
|
||||
"sort_order": 20,
|
||||
"crypto_address_link": "https://live.blockcypher.com/btc/address/{{address}}",
|
||||
"crypto_transaction_link": "https://live.blockcypher.com/btc/tx/{{txId}}",
|
||||
"push_payment_methods": [
|
||||
"crypto"
|
||||
],
|
||||
"group_types": [
|
||||
"btc",
|
||||
"crypto"
|
||||
],
|
||||
"display_name": "",
|
||||
"processing_time_seconds": 0,
|
||||
"min_withdrawal_amount": 0.0001,
|
||||
"max_withdrawal_amount": 2400
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DOGE",
|
||||
"name": "Dogecoin",
|
||||
"min_size": "1",
|
||||
"status": "online",
|
||||
"message": "",
|
||||
"max_precision": "0.1",
|
||||
"convertible_to": [],
|
||||
"details": {
|
||||
"type": "crypto",
|
||||
"symbol": "",
|
||||
"network_confirmations": 60,
|
||||
"sort_order": 29,
|
||||
"crypto_address_link": "https://dogechain.info/address/{{address}}",
|
||||
"crypto_transaction_link": "",
|
||||
"push_payment_methods": [
|
||||
"crypto"
|
||||
],
|
||||
"group_types": [],
|
||||
"display_name": "",
|
||||
"processing_time_seconds": 0,
|
||||
"min_withdrawal_amount": 1,
|
||||
"max_withdrawal_amount": 17391300
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "ETH",
|
||||
"name": "Ether",
|
||||
"min_size": "0.00000001",
|
||||
"status": "online",
|
||||
"message": "",
|
||||
"max_precision": "0.00000001",
|
||||
"convertible_to": [],
|
||||
"details": {
|
||||
"type": "crypto",
|
||||
"symbol": "Ξ",
|
||||
"network_confirmations": 35,
|
||||
"sort_order": 25,
|
||||
"crypto_address_link": "https://etherscan.io/address/{{address}}",
|
||||
"crypto_transaction_link": "https://etherscan.io/tx/0x{{txId}}",
|
||||
"push_payment_methods": [
|
||||
"crypto"
|
||||
],
|
||||
"group_types": [
|
||||
"eth",
|
||||
"crypto"
|
||||
],
|
||||
"display_name": "",
|
||||
"processing_time_seconds": 0,
|
||||
"min_withdrawal_amount": 0.001,
|
||||
"max_withdrawal_amount": 7450
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "EUR",
|
||||
"name": "Euro",
|
||||
"min_size": "0.01",
|
||||
"status": "online",
|
||||
"message": "",
|
||||
"max_precision": "0.01",
|
||||
"convertible_to": [],
|
||||
"details": {
|
||||
"type": "fiat",
|
||||
"symbol": "€",
|
||||
"network_confirmations": 0,
|
||||
"sort_order": 2,
|
||||
"crypto_address_link": "",
|
||||
"crypto_transaction_link": "",
|
||||
"push_payment_methods": [
|
||||
"sepa_bank_account"
|
||||
],
|
||||
"group_types": [
|
||||
"fiat",
|
||||
"eur"
|
||||
],
|
||||
"display_name": "",
|
||||
"processing_time_seconds": 0,
|
||||
"min_withdrawal_amount": 0,
|
||||
"max_withdrawal_amount": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "GBP",
|
||||
"name": "British Pound",
|
||||
"min_size": "0.01",
|
||||
"status": "online",
|
||||
"message": "",
|
||||
"max_precision": "0.01",
|
||||
"convertible_to": [],
|
||||
"details": {
|
||||
"type": "fiat",
|
||||
"symbol": "£",
|
||||
"network_confirmations": 0,
|
||||
"sort_order": 3,
|
||||
"crypto_address_link": "",
|
||||
"crypto_transaction_link": "",
|
||||
"push_payment_methods": [
|
||||
"uk_bank_account",
|
||||
"swift_lhv",
|
||||
"swift"
|
||||
],
|
||||
"group_types": [
|
||||
"fiat",
|
||||
"gbp"
|
||||
],
|
||||
"display_name": "",
|
||||
"processing_time_seconds": 0,
|
||||
"min_withdrawal_amount": 0,
|
||||
"max_withdrawal_amount": 0
|
||||
}
|
||||
}
|
||||
]
|
|
@ -0,0 +1,62 @@
|
|||
[
|
||||
{
|
||||
"id": "BTC-EUR",
|
||||
"base_currency": "BTC",
|
||||
"quote_currency": "EUR",
|
||||
"base_min_size": "0.0001",
|
||||
"base_max_size": "200",
|
||||
"quote_increment": "0.01",
|
||||
"base_increment": "0.00000001",
|
||||
"display_name": "BTC/EUR",
|
||||
"min_market_funds": "10",
|
||||
"max_market_funds": "600000",
|
||||
"margin_enabled": false,
|
||||
"fx_stablecoin": false,
|
||||
"post_only": false,
|
||||
"limit_only": false,
|
||||
"cancel_only": false,
|
||||
"trading_disabled": false,
|
||||
"status": "online",
|
||||
"status_message": ""
|
||||
},
|
||||
{
|
||||
"id": "ETH-GBP",
|
||||
"base_currency": "ETH",
|
||||
"quote_currency": "GBP",
|
||||
"base_min_size": "0.001",
|
||||
"base_max_size": "1400",
|
||||
"quote_increment": "0.01",
|
||||
"base_increment": "0.00000001",
|
||||
"display_name": "ETH/GBP",
|
||||
"min_market_funds": "10",
|
||||
"max_market_funds": "1000000",
|
||||
"margin_enabled": false,
|
||||
"fx_stablecoin": false,
|
||||
"post_only": false,
|
||||
"limit_only": false,
|
||||
"cancel_only": false,
|
||||
"trading_disabled": false,
|
||||
"status": "online",
|
||||
"status_message": ""
|
||||
},
|
||||
{
|
||||
"id": "DOGE-EUR",
|
||||
"base_currency": "DOGE",
|
||||
"quote_currency": "EUR",
|
||||
"base_min_size": "1",
|
||||
"base_max_size": "690000",
|
||||
"quote_increment": "0.0001",
|
||||
"base_increment": "0.1",
|
||||
"display_name": "DOGE/EUR",
|
||||
"min_market_funds": "5.0",
|
||||
"max_market_funds": "100000",
|
||||
"margin_enabled": false,
|
||||
"fx_stablecoin": false,
|
||||
"post_only": false,
|
||||
"limit_only": false,
|
||||
"cancel_only": false,
|
||||
"trading_disabled": false,
|
||||
"status": "online",
|
||||
"status_message": ""
|
||||
}
|
||||
]
|
58
tests/pricehist/sources/test_coinbasepro/recent.json
Normal file
|
@ -0,0 +1,58 @@
|
|||
[
|
||||
[
|
||||
1609977600,
|
||||
29516.98,
|
||||
32900,
|
||||
29818.73,
|
||||
32120.19,
|
||||
5957.46980324
|
||||
],
|
||||
[
|
||||
1609891200,
|
||||
27105.01,
|
||||
29949,
|
||||
27655.04,
|
||||
29838.52,
|
||||
4227.05067035
|
||||
],
|
||||
[
|
||||
1609804800,
|
||||
24413.62,
|
||||
27989,
|
||||
26104.4,
|
||||
27654.01,
|
||||
4036.27720179
|
||||
],
|
||||
[
|
||||
1609718400,
|
||||
22055,
|
||||
26199,
|
||||
25624.7,
|
||||
26115.94,
|
||||
6304.41029978
|
||||
],
|
||||
[
|
||||
1609632000,
|
||||
24500,
|
||||
27195.46,
|
||||
25916.75,
|
||||
25644.41,
|
||||
4975.13927959
|
||||
],
|
||||
[
|
||||
1609545600,
|
||||
22000,
|
||||
27000,
|
||||
24071.26,
|
||||
25907.35,
|
||||
7291.88538639
|
||||
],
|
||||
[
|
||||
1609459200,
|
||||
23512.7,
|
||||
24250,
|
||||
23706.73,
|
||||
24070.97,
|
||||
1830.04655405
|
||||
]
|
||||
]
|
261
tests/pricehist/sources/test_coindesk.py
Normal file
|
@ -0,0 +1,261 @@
|
|||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.coindesk import CoinDesk
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return CoinDesk()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_url():
|
||||
return "https://api.coindesk.com/v1/bpi/supported-currencies.json"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fetch_url():
|
||||
return "https://api.coindesk.com/v1/bpi/historical/close.json"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_json():
|
||||
dir = Path(os.path.splitext(__file__)[0])
|
||||
return (dir / "supported-currencies-partial.json").read_text()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def currencies_response_ok(requests_mock, currencies_url, currencies_json):
|
||||
requests_mock.add(responses.GET, currencies_url, body=currencies_json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def recent_response_ok(requests_mock, fetch_url):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "recent.json").read_text()
|
||||
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def all_response_ok(requests_mock, fetch_url):
|
||||
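    # Full-history fixture: covers the earliest BPI dates through early 2021.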
json = (Path(os.path.splitext(__file__)[0]) / "all-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def not_found_response(requests_mock, fetch_url):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
fetch_url,
|
||||
status=404,
|
||||
body="Sorry, that currency was not found",
|
||||
)
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("btc") == "BTC"
|
||||
assert src.normalizesymbol("usd") == "USD"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols(src, currencies_response_ok):
|
||||
syms = src.symbols()
|
||||
assert ("BTC/AUD", "Bitcoin against Australian Dollar") in syms
|
||||
assert len(syms) > 3
|
||||
|
||||
|
||||
def test_symbols_requests_logged(src, currencies_response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.symbols()
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_symbols_not_found(src, requests_mock, currencies_url):
|
||||
requests_mock.add(responses.GET, currencies_url, body="[]", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "data not found" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_network_issue(src, requests_mock, currencies_url):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
currencies_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_bad_status(src, requests_mock, currencies_url):
|
||||
requests_mock.add(responses.GET, currencies_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_parsing_error(src, requests_mock, currencies_url):
|
||||
requests_mock.add(responses.GET, currencies_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_known_pair(src, type, recent_response_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_response_ok.calls[0].request
|
||||
assert req.params["currency"] == "AUD"
|
||||
assert req.params["start"] == "2021-01-01"
|
||||
assert req.params["end"] == "2021-01-07"
|
||||
assert series.prices[0] == Price("2021-01-01", Decimal("38204.8987"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("50862.227"))
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_requests_logged(src, type, recent_response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_long_hist_from_start(src, type, all_response_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, src.start(), "2021-01-07"))
|
||||
assert series.prices[0] == Price("2010-07-18", Decimal("0.0984"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("50862.227"))
|
||||
assert len(series.prices) > 13
|
||||
|
||||
|
||||
def test_fetch_from_before_start(src, type, requests_mock, fetch_url):
|
||||
body = "Sorry, the CoinDesk BPI only covers data from 2010-07-17 onwards."
|
||||
requests_mock.add(responses.GET, fetch_url, status=404, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2010-01-01", "2010-07-24"))
|
||||
assert "only covers data from" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_to_future(src, type, all_response_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2100-01-01"))
|
||||
assert len(series.prices) > 0
|
||||
|
||||
|
||||
def test_wrong_dates_order(src, type, requests_mock, fetch_url):
|
||||
body = "Sorry, but your specified end date is before your start date."
|
||||
requests_mock.add(responses.GET, fetch_url, status=404, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-07", "2021-01-01"))
|
||||
assert "End date is before start date." in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_in_future(src, type, requests_mock, fetch_url):
|
||||
body = "Sorry, but your specified end date is before your start date."
|
||||
requests_mock.add(responses.GET, fetch_url, status=404, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2030-01-01", "2030-01-07"))
|
||||
assert "start date must be in the past" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_empty(src, type, requests_mock, fetch_url):
|
||||
requests_mock.add(responses.GET, fetch_url, body="{}")
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2010-07-17", "2010-07-17"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_known_pair_no_data(src, type, requests_mock, fetch_url):
|
||||
body = "No results returned from database"
|
||||
requests_mock.add(responses.GET, fetch_url, status=500, body=body)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "CUP", type, "2010-07-17", "2010-07-23"))
|
||||
assert "No results returned from database" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_non_btc_base(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("USD", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_unknown_quote(src, type, not_found_response):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("BTC", "XZY", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_no_quote(src, type, not_found_response):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("BTC", "", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_unknown_pair(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("ABC", "XZY", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_network_issue(src, type, requests_mock, fetch_url):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, fetch_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_status(src, type, requests_mock, fetch_url):
|
||||
requests_mock.add(responses.GET, fetch_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_parsing_error(src, type, requests_mock, fetch_url):
|
||||
requests_mock.add(responses.GET, fetch_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "while parsing data" in str(e.value)
|
23
tests/pricehist/sources/test_coindesk/all-partial.json
Normal file
|
@ -0,0 +1,23 @@
|
|||
{
|
||||
"bpi": {
|
||||
"2010-07-18": 0.0984,
|
||||
"2010-07-19": 0.093,
|
||||
"2010-07-20": 0.0851,
|
||||
"2010-07-21": 0.0898,
|
||||
"2010-07-22": 0.0567,
|
||||
"2010-07-23": 0.07,
|
||||
"2010-07-24": 0.0609,
|
||||
"2021-01-01": 38204.8987,
|
||||
"2021-01-02": 41853.1942,
|
||||
"2021-01-03": 42925.6366,
|
||||
"2021-01-04": 41751.2249,
|
||||
"2021-01-05": 43890.3534,
|
||||
"2021-01-06": 47190.09,
|
||||
"2021-01-07": 50862.227
|
||||
},
|
||||
"disclaimer": "This data was produced from the CoinDesk Bitcoin Price Index. BPI value data returned as AUD.",
|
||||
"time": {
|
||||
"updated": "Jan 8, 2021 00:03:00 UTC",
|
||||
"updatedISO": "2021-01-08T00:03:00+00:00"
|
||||
}
|
||||
}
|
16
tests/pricehist/sources/test_coindesk/recent.json
Normal file
|
@ -0,0 +1,16 @@
|
|||
{
|
||||
"bpi": {
|
||||
"2021-01-01": 38204.8987,
|
||||
"2021-01-02": 41853.1942,
|
||||
"2021-01-03": 42925.6366,
|
||||
"2021-01-04": 41751.2249,
|
||||
"2021-01-05": 43890.3534,
|
||||
"2021-01-06": 47190.09,
|
||||
"2021-01-07": 50862.227
|
||||
},
|
||||
"disclaimer": "This data was produced from the CoinDesk Bitcoin Price Index. BPI value data returned as AUD.",
|
||||
"time": {
|
||||
"updated": "Jan 8, 2021 00:03:00 UTC",
|
||||
"updatedISO": "2021-01-08T00:03:00+00:00"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,26 @@
|
|||
[
|
||||
{
|
||||
"currency": "AUD",
|
||||
"country": "Australian Dollar"
|
||||
},
|
||||
{
|
||||
"currency": "BTC",
|
||||
"country": "Bitcoin"
|
||||
},
|
||||
{
|
||||
"currency": "CUP",
|
||||
"country": "Cuban Peso"
|
||||
},
|
||||
{
|
||||
"currency": "EUR",
|
||||
"country": "Euro"
|
||||
},
|
||||
{
|
||||
"currency": "USD",
|
||||
"country": "United States Dollar"
|
||||
},
|
||||
{
|
||||
"currency": "XBT",
|
||||
"country": "Bitcoin"
|
||||
}
|
||||
]
|
310
tests/pricehist/sources/test_coinmarketcap.py
Normal file
|
@ -0,0 +1,310 @@
|
|||
import logging
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.coinmarketcap import CoinMarketCap
|
||||
|
||||
|
||||
def timestamp(date):
|
||||
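    # Convert a YYYY-MM-DD string into a Unix timestamp at midnight UTC, as
    # used by the API's timeStart/timeEnd parameters.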
return int(
|
||||
datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return CoinMarketCap()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
crypto_url = (
|
||||
"https://api.coinmarketcap.com/data-api/v1/cryptocurrency/map?sort=cmc_rank"
|
||||
)
|
||||
fetch_url = "https://api.coinmarketcap.com/data-api/v3.1/cryptocurrency/historical"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def crypto_ok(requests_mock):
|
||||
json = (Path(os.path.splitext(__file__)[0]) / "crypto-partial.json").read_text()
|
||||
requests_mock.add(responses.GET, crypto_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def recent_id_id_ok(requests_mock):
|
||||
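    # Mocked historical quotes for BTC (id=1) priced in AUD (convertId=2782).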
json = (Path(os.path.splitext(__file__)[0]) / "recent-id1-id2782.json").read_text()
|
||||
requests_mock.add(responses.GET, fetch_url, body=json, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("btc") == "BTC"
|
||||
assert src.normalizesymbol("id=1") == "ID=1"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols(src, crypto_ok):
|
||||
syms = src.symbols()
|
||||
assert ("id=1", "BTC Bitcoin") in syms
|
||||
assert ("id=2782", "AUD Australian Dollar") in syms
|
||||
assert len(syms) > 2
|
||||
|
||||
|
||||
def test_symbols_request_logged(src, crypto_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.symbols()
|
||||
logged_requests = 0
|
||||
for r in caplog.records:
|
||||
if r.levelname == "DEBUG" and "curl " in r.message:
|
||||
logged_requests += 1
|
||||
assert logged_requests == 1
|
||||
|
||||
|
||||
def test_symbols_crypto_not_found(src, requests_mock):
|
||||
requests_mock.add(responses.GET, crypto_url, body="{}", status=200)
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Unexpected content" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_crypto_network_issue(src, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
crypto_url,
|
||||
body=requests.exceptions.ConnectionError("Network issue"),
|
||||
)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.symbols()
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_crypto_bad_status(src, requests_mock):
|
||||
requests_mock.add(responses.GET, crypto_url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.symbols()
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_crypto_parsing_error(src, requests_mock):
|
||||
requests_mock.add(responses.GET, crypto_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_symbols_no_data(src, type, requests_mock):
|
||||
requests_mock.add(responses.GET, crypto_url, body='{"data": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "Empty data section" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_known_pair_id_id(src, type, recent_id_id_ok, crypto_ok):
|
||||
series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_id_id_ok.calls[0].request
|
||||
assert req.params["id"] == "1"
|
||||
assert req.params["convertId"] == "2782"
|
||||
assert (series.base, series.quote) == ("BTC", "AUD")
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_known_pair_id_sym(src, type, recent_id_id_ok, crypto_ok):
|
||||
series = src.fetch(Series("ID=1", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_id_id_ok.calls[1].request
|
||||
assert req.params["id"] == "1"
|
||||
assert req.params["convertId"] == "2782"
|
||||
assert (series.base, series.quote) == ("BTC", "AUD")
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_known_pair_sym_id(src, type, recent_id_id_ok, crypto_ok):
|
||||
series = src.fetch(Series("BTC", "ID=2782", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_id_id_ok.calls[1].request
|
||||
assert req.params["id"] == "1"
|
||||
assert req.params["convertId"] == "2782"
|
||||
assert (series.base, series.quote) == ("BTC", "AUD")
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_known_pair_sym_sym(src, type, recent_id_id_ok, crypto_ok):
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_id_id_ok.calls[1].request
|
||||
assert req.params["id"] == "1"
|
||||
assert req.params["convertId"] == "2782"
|
||||
assert len(series.prices) == 7
|
||||
|
||||
|
||||
def test_fetch_requests_and_receives_correct_times(
|
||||
src, type, recent_id_id_ok, crypto_ok
|
||||
):
|
||||
series = src.fetch(Series("ID=1", "ID=2782", type, "2021-01-01", "2021-01-07"))
|
||||
req = recent_id_id_ok.calls[0].request
|
||||
assert req.params["timeStart"] == str(timestamp("2020-12-31")) # back one period
|
||||
assert req.params["timeEnd"] == str(timestamp("2021-01-07"))
|
||||
assert series.prices[0] == Price("2021-01-01", Decimal("37914.35060237985"))
|
||||
assert series.prices[-1] == Price("2021-01-07", Decimal("49369.66288590665"))
|
||||
|
||||
|
||||
def test_fetch_requests_logged(src, type, crypto_ok, recent_id_id_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_types_all_available(src, crypto_ok, recent_id_id_ok):
|
||||
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07"))
|
||||
opn = src.fetch(Series("BTC", "AUD", "open", "2021-01-01", "2021-01-07"))
|
||||
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07"))
|
||||
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07"))
|
||||
cls = src.fetch(Series("BTC", "AUD", "close", "2021-01-01", "2021-01-07"))
|
||||
assert mid.prices[0].amount == Decimal("37914.35060237985")
|
||||
assert opn.prices[0].amount == Decimal("37658.1146368474")
|
||||
assert hgh.prices[0].amount == Decimal("38417.9137031205")
|
||||
assert low.prices[0].amount == Decimal("37410.7875016392")
|
||||
assert cls.prices[0].amount == Decimal("38181.9913330076")
|
||||
|
||||
|
||||
def test_fetch_type_mid_is_mean_of_low_and_high(src, crypto_ok, recent_id_id_ok):
|
||||
mid = src.fetch(Series("BTC", "AUD", "mid", "2021-01-01", "2021-01-07")).prices
|
||||
low = src.fetch(Series("BTC", "AUD", "low", "2021-01-01", "2021-01-07")).prices
|
||||
hgh = src.fetch(Series("BTC", "AUD", "high", "2021-01-01", "2021-01-07")).prices
|
||||
assert all(
|
||||
[
|
||||
mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
|
||||
for i in range(0, 7)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_empty(src, type, crypto_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
fetch_url,
|
||||
body="""{
|
||||
"data": {
|
||||
"id": 1,
|
||||
"name": "Bitcoin",
|
||||
"symbol": "BTC",
|
||||
"timeEnd": "1228348799",
|
||||
"quotes": []
|
||||
},
|
||||
"status": {
|
||||
"timestamp": "2024-08-03T09:31:52.719Z",
|
||||
"error_code": "0",
|
||||
"error_message": "SUCCESS",
|
||||
"elapsed": "14",
|
||||
"credit_count": 0
|
||||
}
|
||||
}""",
|
||||
)
|
||||
series = src.fetch(Series("BTC", "AUD", type, "2010-01-01", "2010-01-07"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_bad_base_sym(src, type, crypto_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("NOTABASE", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Invalid symbol 'NOTABASE'" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_quote_sym(src, type, crypto_ok):
|
||||
with pytest.raises(exceptions.InvalidPair) as e:
|
||||
src.fetch(Series("BTC", "NOTAQUOTE", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Invalid symbol 'NOTAQUOTE'" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_response(src, type, crypto_ok, requests_mock):
|
||||
requests_mock.add(
|
||||
responses.GET,
|
||||
fetch_url,
|
||||
status=200,
|
||||
body="""{
|
||||
"status": {
|
||||
"timestamp": "2024-08-03T09:42:43.699Z",
|
||||
"error_code": "500",
|
||||
"error_message": "The system is busy, please try again later!",
|
||||
"elapsed": "0",
|
||||
"credit_count": 0
|
||||
}
|
||||
}""",
|
||||
)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("ID=987654321", "USD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "general error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_no_quote(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("BTC", "", type, "2021-01-01", "2021-01-07"))
|
||||
|
||||
|
||||
def test_fetch_network_issue(src, type, crypto_ok, requests_mock):
|
||||
body = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, fetch_url, body=body)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_status(src, type, crypto_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, fetch_url, status=500, body="Some other reason")
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Internal Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_parsing_error(src, type, crypto_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, fetch_url, body="NOT JSON")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "while parsing data" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_unexpected_json(src, type, crypto_ok, requests_mock):
|
||||
requests_mock.add(responses.GET, fetch_url, body='{"notdata": []}')
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("BTC", "AUD", type, "2021-01-01", "2021-01-07"))
|
||||
assert "Unexpected content" in str(e.value)
|
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"status": {
|
||||
"timestamp": "2021-07-16T10:08:28.938Z",
|
||||
"error_code": 0,
|
||||
"error_message": null,
|
||||
"elapsed": 18,
|
||||
"credit_count": 0,
|
||||
"notice": null
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Bitcoin",
|
||||
"symbol": "BTC",
|
||||
"slug": "bitcoin",
|
||||
"rank": 1,
|
||||
"is_active": 1,
|
||||
"first_historical_data": "2013-04-28T18:47:21.000Z",
|
||||
"last_historical_data": "2021-07-16T09:59:03.000Z",
|
||||
"platform": null
|
||||
},
|
||||
{
|
||||
"id": 1027,
|
||||
"name": "Ethereum",
|
||||
"symbol": "ETH",
|
||||
"slug": "ethereum",
|
||||
"rank": 2,
|
||||
"is_active": 1,
|
||||
"first_historical_data": "2015-08-07T14:49:30.000Z",
|
||||
"last_historical_data": "2021-07-16T09:59:04.000Z",
|
||||
"platform": null
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,129 @@
|
|||
{
|
||||
"data": {
|
||||
"id": 1,
|
||||
"name": "Bitcoin",
|
||||
"symbol": "BTC",
|
||||
"timeEnd": "1575503999",
|
||||
"quotes": [
|
||||
{
|
||||
"timeOpen": "2021-01-01T00:00:00.000Z",
|
||||
"timeClose": "2021-01-01T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-01T12:38:43.000Z",
|
||||
"timeLow": "2021-01-01T00:16:43.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 37658.1146368474,
|
||||
"high": 38417.9137031205,
|
||||
"low": 37410.7875016392,
|
||||
"close": 38181.9913330076,
|
||||
"volume": 52901492931.8344367080,
|
||||
"marketCap": 709159975413.2388897949,
|
||||
"timestamp": "2021-01-01T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-02T00:00:00.000Z",
|
||||
"timeClose": "2021-01-02T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-02T19:49:42.000Z",
|
||||
"timeLow": "2021-01-02T00:31:44.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 38184.9861160068,
|
||||
"high": 43096.6811974230,
|
||||
"low": 37814.1718709653,
|
||||
"close": 41760.6292307951,
|
||||
"volume": 88214867181.9830439141,
|
||||
"marketCap": 776278147177.8037261338,
|
||||
"timestamp": "2021-01-02T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-03T00:00:00.000Z",
|
||||
"timeClose": "2021-01-03T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-03T07:47:38.000Z",
|
||||
"timeLow": "2021-01-03T00:20:45.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 41763.4101511766,
|
||||
"high": 44985.9324758502,
|
||||
"low": 41663.2043506016,
|
||||
"close": 42534.0538859236,
|
||||
"volume": 102253005977.1115650988,
|
||||
"marketCap": 792140565709.1701340036,
|
||||
"timestamp": "2021-01-03T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-04T00:00:00.000Z",
|
||||
"timeClose": "2021-01-04T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-04T04:07:42.000Z",
|
||||
"timeLow": "2021-01-04T10:19:42.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 42548.6134964877,
|
||||
"high": 43347.7527651400,
|
||||
"low": 37111.8678479690,
|
||||
"close": 41707.4890765162,
|
||||
"volume": 105251252720.3013091567,
|
||||
"marketCap": 770785910830.3801120744,
|
||||
"timestamp": "2021-01-04T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-05T00:00:00.000Z",
|
||||
"timeClose": "2021-01-05T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-05T22:44:35.000Z",
|
||||
"timeLow": "2021-01-05T06:16:41.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 41693.0732180764,
|
||||
"high": 44406.6531914952,
|
||||
"low": 39220.9654861842,
|
||||
"close": 43777.4560620835,
|
||||
"volume": 88071174132.6445648582,
|
||||
"marketCap": 824003338903.4613958343,
|
||||
"timestamp": "2021-01-05T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-06T00:00:00.000Z",
|
||||
"timeClose": "2021-01-06T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-06T23:57:36.000Z",
|
||||
"timeLow": "2021-01-06T00:25:38.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 43798.3790529373,
|
||||
"high": 47185.7303335186,
|
||||
"low": 43152.6028176424,
|
||||
"close": 47114.9330444897,
|
||||
"volume": 96948095813.7503737302,
|
||||
"marketCap": 881631993096.0701475336,
|
||||
"timestamp": "2021-01-06T23:59:59.999Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"timeOpen": "2021-01-07T00:00:00.000Z",
|
||||
"timeClose": "2021-01-07T23:59:59.999Z",
|
||||
"timeHigh": "2021-01-07T18:17:42.000Z",
|
||||
"timeLow": "2021-01-07T08:25:51.000Z",
|
||||
"quote": {
|
||||
"name": "2782",
|
||||
"open": 47128.0213932810,
|
||||
"high": 51832.6746004172,
|
||||
"low": 46906.6511713961,
|
||||
"close": 50660.9643451606,
|
||||
"volume": 108451040396.2660095877,
|
||||
"marketCap": 936655898949.2177196744,
|
||||
"timestamp": "2021-01-07T23:59:59.999Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"status": {
|
||||
"timestamp": "2024-08-02T18:23:21.586Z",
|
||||
"error_code": "0",
|
||||
"error_message": "SUCCESS",
|
||||
"elapsed": "212",
|
||||
"credit_count": 0
|
||||
}
|
||||
}
|
212
tests/pricehist/sources/test_ecb.py
Normal file
|
@ -0,0 +1,212 @@
|
|||
import logging
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from decimal import Decimal
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import responses
|
||||
|
||||
from pricehist import exceptions, isocurrencies
|
||||
from pricehist.price import Price
|
||||
from pricehist.series import Series
|
||||
from pricehist.sources.ecb import ECB
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def src():
|
||||
return ECB()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def type(src):
|
||||
return src.types()[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def url():
|
||||
return "https://www.ecb.europa.eu/stats/eurofxref/eurofxref-hist.xml"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def url_90d():
|
||||
return "https://www.ecb.europa.eu/stats/eurofxref/eurofxref-hist-90d.xml"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def xml():
|
||||
dir = Path(os.path.splitext(__file__)[0])
|
||||
return (dir / "eurofxref-hist-partial.xml").read_text()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def requests_mock():
|
||||
with responses.RequestsMock() as mock:
|
||||
yield mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def response_ok(requests_mock, url, xml):
|
||||
requests_mock.add(responses.GET, url, body=xml, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def response_ok_90d(requests_mock, url_90d, xml):
|
||||
requests_mock.add(responses.GET, url_90d, body=xml, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def response_empty_xml(requests_mock, url):
|
||||
empty_xml = (
|
||||
Path(os.path.splitext(__file__)[0]) / "eurofxref-hist-empty.xml"
|
||||
).read_text()
|
||||
requests_mock.add(responses.GET, url, body=empty_xml, status=200)
|
||||
yield requests_mock
|
||||
|
||||
|
||||
def test_normalizesymbol(src):
|
||||
assert src.normalizesymbol("eur") == "EUR"
|
||||
assert src.normalizesymbol("symbol") == "SYMBOL"
|
||||
|
||||
|
||||
def test_metadata(src):
|
||||
assert isinstance(src.id(), str)
|
||||
assert len(src.id()) > 0
|
||||
|
||||
assert isinstance(src.name(), str)
|
||||
assert len(src.name()) > 0
|
||||
|
||||
assert isinstance(src.description(), str)
|
||||
assert len(src.description()) > 0
|
||||
|
||||
assert isinstance(src.source_url(), str)
|
||||
assert src.source_url().startswith("http")
|
||||
|
||||
assert datetime.strptime(src.start(), "%Y-%m-%d")
|
||||
|
||||
assert isinstance(src.types(), list)
|
||||
assert len(src.types()) > 0
|
||||
assert isinstance(src.types()[0], str)
|
||||
assert len(src.types()[0]) > 0
|
||||
|
||||
assert isinstance(src.notes(), str)
|
||||
|
||||
|
||||
def test_symbols(src, response_ok):
|
||||
syms = src.symbols()
|
||||
assert ("EUR/AUD", "Euro against Australian Dollar") in syms
|
||||
assert len(syms) > 40
|
||||
|
||||
|
||||
def test_symbols_requests_logged(src, response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.symbols()
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_symbols_not_in_iso_data(src, response_ok, monkeypatch):
|
||||
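    # Drop AUD from the ISO currency data so the source has to fall back to the
    # bare currency code in the symbol description.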
iso = isocurrencies.by_code()
|
||||
del iso["AUD"]
|
||||
monkeypatch.setattr(isocurrencies, "by_code", lambda: iso)
|
||||
syms = src.symbols()
|
||||
assert ("EUR/AUD", "Euro against AUD") in syms
|
||||
|
||||
|
||||
def test_symbols_not_found(src, response_empty_xml):
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.symbols()
|
||||
assert "data not found" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_known_pair(src, type, response_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert series.prices[0] == Price("2021-01-04", Decimal("1.5928"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("1.5758"))
|
||||
assert len(series.prices) == 5
|
||||
|
||||
|
||||
def test_fetch_requests_logged(src, type, response_ok, caplog):
|
||||
with caplog.at_level(logging.DEBUG):
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert any(
|
||||
["DEBUG" == r.levelname and "curl " in r.message for r in caplog.records]
|
||||
)
|
||||
|
||||
|
||||
def test_fetch_recent_interval_uses_90d_data(src, type, response_ok_90d):
|
||||
today = datetime.now().date()
|
||||
start = (today - timedelta(days=80)).isoformat()
|
||||
end = today.isoformat()
|
||||
src.fetch(Series("EUR", "AUD", type, start, end))
|
||||
assert len(response_ok_90d.calls) > 0
|
||||
|
||||
|
||||
def test_fetch_long_hist_from_start(src, type, response_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, src.start(), "2021-01-08"))
|
||||
assert series.prices[0] == Price("1999-01-04", Decimal("1.91"))
|
||||
assert series.prices[-1] == Price("2021-01-08", Decimal("1.5758"))
|
||||
assert len(series.prices) > 9
|
||||
|
||||
|
||||
def test_fetch_from_before_start(src, type, response_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, "1998-12-01", "1999-01-10"))
|
||||
assert series.prices[0] == Price("1999-01-04", Decimal("1.91"))
|
||||
assert series.prices[-1] == Price("1999-01-08", Decimal("1.8406"))
|
||||
assert len(series.prices) == 5
|
||||
|
||||
|
||||
def test_fetch_to_future(src, type, response_ok):
|
||||
series = src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2100-01-01"))
|
||||
assert len(series.prices) > 0
|
||||
|
||||
|
||||
def test_fetch_known_pair_no_data(src, type, response_ok):
|
||||
series = src.fetch(Series("EUR", "ROL", type, "2021-01-04", "2021-02-08"))
|
||||
assert len(series.prices) == 0
|
||||
|
||||
|
||||
def test_fetch_non_eur_base(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("USD", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
|
||||
|
||||
def test_fetch_unknown_quote(src, type, response_ok):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("EUR", "XZY", type, "2021-01-04", "2021-01-08"))
|
||||
|
||||
|
||||
def test_fetch_no_quote(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("EUR", "", type, "2021-01-04", "2021-01-08"))
|
||||
|
||||
|
||||
def test_fetch_unknown_pair(src, type):
|
||||
with pytest.raises(exceptions.InvalidPair):
|
||||
src.fetch(Series("ABC", "XZY", type, "2021-01-04", "2021-01-08"))
|
||||
|
||||
|
||||
def test_fetch_network_issue(src, type, requests_mock, url):
|
||||
err = requests.exceptions.ConnectionError("Network issue")
|
||||
requests_mock.add(responses.GET, url, body=err)
|
||||
with pytest.raises(exceptions.RequestError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Network issue" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_bad_status(src, type, requests_mock, url):
|
||||
requests_mock.add(responses.GET, url, status=500)
|
||||
with pytest.raises(exceptions.BadResponse) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "Server Error" in str(e.value)
|
||||
|
||||
|
||||
def test_fetch_parsing_error(src, type, requests_mock, url):
|
||||
requests_mock.add(responses.GET, url, body="NOT XML")
|
||||
with pytest.raises(exceptions.ResponseParsingError) as e:
|
||||
src.fetch(Series("EUR", "AUD", type, "2021-01-04", "2021-01-08"))
|
||||
assert "while parsing data" in str(e.value)
|
|
@ -0,0 +1,9 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<gesmes:Envelope xmlns:gesmes="http://www.gesmes.org/xml/2002-08-01" xmlns="http://www.ecb.int/vocabulary/2002-08-01/eurofxref">
|
||||
<gesmes:subject>Reference rates</gesmes:subject>
|
||||
<gesmes:Sender>
|
||||
<gesmes:name>European Central Bank</gesmes:name>
|
||||
</gesmes:Sender>
|
||||
<Cube>
|
||||
</Cube>
|
||||
</gesmes:Envelope>
|
358
tests/pricehist/sources/test_ecb/eurofxref-hist-partial.xml
Normal file
|
@ -0,0 +1,358 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<gesmes:Envelope xmlns:gesmes="http://www.gesmes.org/xml/2002-08-01" xmlns="http://www.ecb.int/vocabulary/2002-08-01/eurofxref">
|
||||
<gesmes:subject>Reference rates</gesmes:subject>
|
||||
<gesmes:Sender>
|
||||
<gesmes:name>European Central Bank</gesmes:name>
|
||||
</gesmes:Sender>
|
||||
<Cube>
|
||||
<Cube time="2021-01-08">
|
||||
<Cube currency="USD" rate="1.225"/>
|
||||
<Cube currency="JPY" rate="127.26"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="26.163"/>
|
||||
<Cube currency="DKK" rate="7.4369"/>
|
||||
<Cube currency="GBP" rate="0.90128"/>
|
||||
<Cube currency="HUF" rate="359.62"/>
|
||||
<Cube currency="PLN" rate="4.5113"/>
|
||||
<Cube currency="RON" rate="4.8708"/>
|
||||
<Cube currency="SEK" rate="10.051"/>
|
||||
<Cube currency="CHF" rate="1.0827"/>
|
||||
<Cube currency="ISK" rate="155.5"/>
|
||||
<Cube currency="NOK" rate="10.2863"/>
|
||||
<Cube currency="HRK" rate="7.569"/>
|
||||
<Cube currency="RUB" rate="90.8"/>
|
||||
<Cube currency="TRY" rate="9.0146"/>
|
||||
<Cube currency="AUD" rate="1.5758"/>
|
||||
<Cube currency="BRL" rate="6.5748"/>
|
||||
<Cube currency="CAD" rate="1.5543"/>
|
||||
<Cube currency="CNY" rate="7.9184"/>
|
||||
<Cube currency="HKD" rate="9.4982"/>
|
||||
<Cube currency="IDR" rate="17247.33"/>
|
||||
<Cube currency="ILS" rate="3.8981"/>
|
||||
<Cube currency="INR" rate="89.7975"/>
|
||||
<Cube currency="KRW" rate="1337.9"/>
|
||||
<Cube currency="MXN" rate="24.4718"/>
|
||||
<Cube currency="MYR" rate="4.9359"/>
|
||||
<Cube currency="NZD" rate="1.6883"/>
|
||||
<Cube currency="PHP" rate="58.947"/>
|
||||
<Cube currency="SGD" rate="1.6228"/>
|
||||
<Cube currency="THB" rate="36.848"/>
|
||||
<Cube currency="ZAR" rate="18.7212"/>
|
||||
</Cube>
|
||||
<Cube time="2021-01-07">
|
||||
<Cube currency="USD" rate="1.2276"/>
|
||||
<Cube currency="JPY" rate="127.13"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="26.147"/>
|
||||
<Cube currency="DKK" rate="7.4392"/>
|
||||
<Cube currency="GBP" rate="0.9019"/>
|
||||
<Cube currency="HUF" rate="357.79"/>
|
||||
<Cube currency="PLN" rate="4.4998"/>
|
||||
<Cube currency="RON" rate="4.8712"/>
|
||||
<Cube currency="SEK" rate="10.0575"/>
|
||||
<Cube currency="CHF" rate="1.0833"/>
|
||||
<Cube currency="ISK" rate="155.3"/>
|
||||
<Cube currency="NOK" rate="10.3435"/>
|
||||
<Cube currency="HRK" rate="7.566"/>
|
||||
<Cube currency="RUB" rate="91.2"/>
|
||||
<Cube currency="TRY" rate="8.9987"/>
|
||||
<Cube currency="AUD" rate="1.5836"/>
|
||||
<Cube currency="BRL" rate="6.5172"/>
|
||||
<Cube currency="CAD" rate="1.5601"/>
|
||||
<Cube currency="CNY" rate="7.9392"/>
|
||||
<Cube currency="HKD" rate="9.5176"/>
|
||||
<Cube currency="IDR" rate="17259.99"/>
|
||||
<Cube currency="ILS" rate="3.9027"/>
|
||||
<Cube currency="INR" rate="90.0455"/>
|
||||
<Cube currency="KRW" rate="1342.29"/>
|
||||
<Cube currency="MXN" rate="24.2552"/>
|
||||
<Cube currency="MYR" rate="4.957"/>
|
||||
<Cube currency="NZD" rate="1.6907"/>
|
||||
<Cube currency="PHP" rate="59.043"/>
|
||||
<Cube currency="SGD" rate="1.6253"/>
|
||||
<Cube currency="THB" rate="36.859"/>
|
||||
<Cube currency="ZAR" rate="18.7919"/>
|
||||
</Cube>
|
||||
<Cube time="2021-01-06">
|
||||
<Cube currency="USD" rate="1.2338"/>
|
||||
<Cube currency="JPY" rate="127.03"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="26.145"/>
|
||||
<Cube currency="DKK" rate="7.4393"/>
|
||||
<Cube currency="GBP" rate="0.90635"/>
|
||||
<Cube currency="HUF" rate="357.86"/>
|
||||
<Cube currency="PLN" rate="4.516"/>
|
||||
<Cube currency="RON" rate="4.872"/>
|
||||
<Cube currency="SEK" rate="10.0653"/>
|
||||
<Cube currency="CHF" rate="1.0821"/>
|
||||
<Cube currency="ISK" rate="156.3"/>
|
||||
<Cube currency="NOK" rate="10.381"/>
|
||||
<Cube currency="HRK" rate="7.5595"/>
|
||||
<Cube currency="RUB" rate="90.8175"/>
|
||||
<Cube currency="TRY" rate="9.0554"/>
|
||||
<Cube currency="AUD" rate="1.5824"/>
|
||||
<Cube currency="BRL" rate="6.5119"/>
|
||||
<Cube currency="CAD" rate="1.564"/>
|
||||
<Cube currency="CNY" rate="7.9653"/>
|
||||
<Cube currency="HKD" rate="9.5659"/>
|
||||
<Cube currency="IDR" rate="17168.2"/>
|
||||
<Cube currency="ILS" rate="3.9289"/>
|
||||
<Cube currency="INR" rate="90.204"/>
|
||||
<Cube currency="KRW" rate="1339.3"/>
|
||||
<Cube currency="MXN" rate="24.3543"/>
|
||||
<Cube currency="MYR" rate="4.9482"/>
|
||||
<Cube currency="NZD" rate="1.6916"/>
|
||||
<Cube currency="PHP" rate="59.296"/>
|
||||
<Cube currency="SGD" rate="1.6246"/>
|
||||
<Cube currency="THB" rate="36.921"/>
|
||||
<Cube currency="ZAR" rate="18.5123"/>
|
||||
</Cube>
|
||||
<Cube time="2021-01-05">
|
||||
<Cube currency="USD" rate="1.2271"/>
|
||||
<Cube currency="JPY" rate="126.25"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="26.227"/>
|
||||
<Cube currency="DKK" rate="7.4387"/>
|
||||
<Cube currency="GBP" rate="0.90333"/>
|
||||
<Cube currency="HUF" rate="360.27"/>
|
||||
<Cube currency="PLN" rate="4.5473"/>
|
||||
<Cube currency="RON" rate="4.8721"/>
|
||||
<Cube currency="SEK" rate="10.057"/>
|
||||
<Cube currency="CHF" rate="1.0803"/>
|
||||
<Cube currency="ISK" rate="156.1"/>
|
||||
<Cube currency="NOK" rate="10.4713"/>
|
||||
<Cube currency="HRK" rate="7.5588"/>
|
||||
<Cube currency="RUB" rate="91.6715"/>
|
||||
<Cube currency="TRY" rate="9.0694"/>
|
||||
<Cube currency="AUD" rate="1.5927"/>
|
||||
<Cube currency="BRL" rate="6.5517"/>
|
||||
<Cube currency="CAD" rate="1.5651"/>
|
||||
<Cube currency="CNY" rate="7.9315"/>
|
||||
<Cube currency="HKD" rate="9.5136"/>
|
||||
<Cube currency="IDR" rate="17075.1"/>
|
||||
<Cube currency="ILS" rate="3.9277"/>
|
||||
<Cube currency="INR" rate="89.867"/>
|
||||
<Cube currency="KRW" rate="1335.85"/>
|
||||
<Cube currency="MXN" rate="24.586"/>
|
||||
<Cube currency="MYR" rate="4.9293"/>
|
||||
<Cube currency="NZD" rate="1.7036"/>
|
||||
<Cube currency="PHP" rate="59.02"/>
|
||||
<Cube currency="SGD" rate="1.618"/>
|
||||
<Cube currency="THB" rate="36.776"/>
|
||||
<Cube currency="ZAR" rate="18.4194"/>
|
||||
</Cube>
|
||||
<Cube time="2021-01-04">
|
||||
<Cube currency="USD" rate="1.2296"/>
|
||||
<Cube currency="JPY" rate="126.62"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="26.141"/>
|
||||
<Cube currency="DKK" rate="7.4379"/>
|
||||
<Cube currency="GBP" rate="0.9016"/>
|
||||
<Cube currency="HUF" rate="361.32"/>
|
||||
<Cube currency="PLN" rate="4.5475"/>
|
||||
<Cube currency="RON" rate="4.8713"/>
|
||||
<Cube currency="SEK" rate="10.0895"/>
|
||||
<Cube currency="CHF" rate="1.0811"/>
|
||||
<Cube currency="ISK" rate="156.1"/>
|
||||
<Cube currency="NOK" rate="10.444"/>
|
||||
<Cube currency="HRK" rate="7.5565"/>
|
||||
<Cube currency="RUB" rate="90.342"/>
|
||||
<Cube currency="TRY" rate="9.0579"/>
|
||||
<Cube currency="AUD" rate="1.5928"/>
|
||||
<Cube currency="BRL" rate="6.3241"/>
|
||||
<Cube currency="CAD" rate="1.5621"/>
|
||||
<Cube currency="CNY" rate="7.9484"/>
|
||||
<Cube currency="HKD" rate="9.533"/>
|
||||
<Cube currency="IDR" rate="17062.67"/>
|
||||
<Cube currency="ILS" rate="3.943"/>
|
||||
<Cube currency="INR" rate="89.789"/>
|
||||
<Cube currency="KRW" rate="1332.03"/>
|
||||
<Cube currency="MXN" rate="24.3031"/>
|
||||
<Cube currency="MYR" rate="4.9264"/>
|
||||
<Cube currency="NZD" rate="1.7065"/>
|
||||
<Cube currency="PHP" rate="59.058"/>
|
||||
<Cube currency="SGD" rate="1.6198"/>
|
||||
<Cube currency="THB" rate="36.728"/>
|
||||
<Cube currency="ZAR" rate="17.9214"/>
|
||||
</Cube>
|
||||
<Cube time="2021-06-25">
|
||||
<Cube currency="USD" rate="1.195"/>
|
||||
<Cube currency="JPY" rate="132.27"/>
|
||||
<Cube currency="BGN" rate="1.9558"/>
|
||||
<Cube currency="CZK" rate="25.487"/>
|
||||
<Cube currency="DKK" rate="7.4363"/>
|
||||
<Cube currency="GBP" rate="0.8595"/>
|
||||
<Cube currency="HUF" rate="351.88"/>
|
||||
<Cube currency="PLN" rate="4.5132"/>
|
||||
<Cube currency="RON" rate="4.9263"/>
|
||||
<Cube currency="SEK" rate="10.1103"/>
|
||||
<Cube currency="CHF" rate="1.0956"/>
|
||||
<Cube currency="ISK" rate="147.1"/>
|
||||
<Cube currency="NOK" rate="10.136"/>
|
||||
<Cube currency="HRK" rate="7.4975"/>
|
||||
<Cube currency="RUB" rate="86.188"/>
|
||||
<Cube currency="TRY" rate="10.3887"/>
|
||||
<Cube currency="AUD" rate="1.5726"/>
|
||||
<Cube currency="BRL" rate="5.8635"/>
|
||||
<Cube currency="CAD" rate="1.4696"/>
|
||||
<Cube currency="CNY" rate="7.7139"/>
|
||||
<Cube currency="HKD" rate="9.2751"/>
|
||||
<Cube currency="IDR" rate="17245.4"/>
|
||||
<Cube currency="ILS" rate="3.8811"/>
|
||||
<Cube currency="INR" rate="88.6824"/>
|
||||
<Cube currency="KRW" rate="1346.35"/>
|
||||
<Cube currency="MXN" rate="23.6766"/>
|
||||
<Cube currency="MYR" rate="4.9664"/>
|
||||
<Cube currency="NZD" rate="1.6881"/>
|
||||
<Cube currency="PHP" rate="57.96"/>
|
||||
<Cube currency="SGD" rate="1.6035"/>
|
||||
<Cube currency="THB" rate="38.013"/>
|
||||
<Cube currency="ZAR" rate="16.8359"/>
|
||||
</Cube>
|
||||
<Cube time="1999-01-08">
|
||||
<Cube currency="USD" rate="1.1659"/>
|
||||
<Cube currency="JPY" rate="130.09"/>
|
||||
<Cube currency="CYP" rate="0.58187"/>
|
||||
<Cube currency="CZK" rate="34.938"/>
|
||||
<Cube currency="DKK" rate="7.4433"/>
|
||||
<Cube currency="EEK" rate="15.6466"/>
|
||||
<Cube currency="GBP" rate="0.7094"/>
|
||||
<Cube currency="HUF" rate="250.15"/>
|
||||
<Cube currency="LTL" rate="4.6643"/>
|
||||
<Cube currency="LVL" rate="0.6654"/>
|
||||
<Cube currency="MTL" rate="0.4419"/>
|
||||
<Cube currency="PLN" rate="4.0363"/>
|
||||
<Cube currency="ROL" rate="13143"/>
|
||||
<Cube currency="SEK" rate="9.165"/>
|
||||
<Cube currency="SIT" rate="188.84"/>
|
||||
<Cube currency="SKK" rate="42.56"/>
|
||||
<Cube currency="CHF" rate="1.6138"/>
|
||||
<Cube currency="ISK" rate="80.99"/>
|
||||
<Cube currency="NOK" rate="8.59"/>
|
||||
<Cube currency="TRL" rate="371830"/>
|
||||
<Cube currency="AUD" rate="1.8406"/>
|
||||
<Cube currency="CAD" rate="1.7643"/>
|
||||
<Cube currency="HKD" rate="9.0302"/>
|
||||
<Cube currency="KRW" rate="1366.73"/>
|
||||
<Cube currency="NZD" rate="2.1557"/>
|
||||
<Cube currency="SGD" rate="1.9537"/>
|
||||
<Cube currency="ZAR" rate="6.7855"/>
|
||||
</Cube>
|
||||
<Cube time="1999-01-07">
|
||||
<Cube currency="USD" rate="1.1632"/>
|
||||
<Cube currency="JPY" rate="129.43"/>
|
||||
<Cube currency="CYP" rate="0.58187"/>
|
||||
<Cube currency="CZK" rate="34.886"/>
|
||||
<Cube currency="DKK" rate="7.4431"/>
|
||||
<Cube currency="EEK" rate="15.6466"/>
|
||||
<Cube currency="GBP" rate="0.70585"/>
|
||||
<Cube currency="HUF" rate="250.09"/>
|
||||
<Cube currency="LTL" rate="4.6548"/>
|
||||
<Cube currency="LVL" rate="0.6627"/>
|
||||
<Cube currency="MTL" rate="0.4413"/>
|
||||
<Cube currency="PLN" rate="4.0165"/>
|
||||
<Cube currency="ROL" rate="13092"/>
|
||||
<Cube currency="SEK" rate="9.18"/>
|
||||
<Cube currency="SIT" rate="188.8"/>
|
||||
<Cube currency="SKK" rate="42.765"/>
|
||||
<Cube currency="CHF" rate="1.6165"/>
|
||||
<Cube currency="ISK" rate="81.06"/>
|
||||
<Cube currency="NOK" rate="8.6295"/>
|
||||
<Cube currency="TRL" rate="370147"/>
|
||||
<Cube currency="AUD" rate="1.8474"/>
|
||||
<Cube currency="CAD" rate="1.7602"/>
|
||||
<Cube currency="HKD" rate="9.0131"/>
|
||||
<Cube currency="KRW" rate="1337.16"/>
|
||||
<Cube currency="NZD" rate="2.1531"/>
|
||||
<Cube currency="SGD" rate="1.9436"/>
|
||||
<Cube currency="ZAR" rate="6.8283"/>
|
||||
</Cube>
|
||||
<Cube time="1999-01-06">
|
||||
<Cube currency="USD" rate="1.1743"/>
|
||||
<Cube currency="JPY" rate="131.42"/>
|
||||
<Cube currency="CYP" rate="0.582"/>
|
||||
<Cube currency="CZK" rate="34.85"/>
|
||||
<Cube currency="DKK" rate="7.4452"/>
|
||||
<Cube currency="EEK" rate="15.6466"/>
|
||||
<Cube currency="GBP" rate="0.7076"/>
|
||||
<Cube currency="HUF" rate="250.67"/>
|
||||
<Cube currency="LTL" rate="4.6994"/>
|
||||
<Cube currency="LVL" rate="0.6649"/>
|
||||
<Cube currency="MTL" rate="0.442"/>
|
||||
<Cube currency="PLN" rate="4.0065"/>
|
||||
<Cube currency="ROL" rate="13168"/>
|
||||
<Cube currency="SEK" rate="9.305"/>
|
||||
<Cube currency="SIT" rate="188.7"/>
|
||||
<Cube currency="SKK" rate="42.778"/>
|
||||
<Cube currency="CHF" rate="1.6116"/>
|
||||
<Cube currency="ISK" rate="81.54"/>
|
||||
<Cube currency="NOK" rate="8.7335"/>
|
||||
<Cube currency="TRL" rate="372188"/>
|
||||
<Cube currency="AUD" rate="1.882"/>
|
||||
<Cube currency="CAD" rate="1.7711"/>
|
||||
<Cube currency="HKD" rate="9.101"/>
|
||||
<Cube currency="KRW" rate="1359.54"/>
|
||||
<Cube currency="NZD" rate="2.189"/>
|
||||
<Cube currency="SGD" rate="1.9699"/>
|
||||
<Cube currency="ZAR" rate="6.7307"/>
|
||||
</Cube>
|
||||
<Cube time="1999-01-05">
|
||||
<Cube currency="USD" rate="1.179"/>
|
||||
<Cube currency="JPY" rate="130.96"/>
|
||||
<Cube currency="CYP" rate="0.5823"/>
|
||||
<Cube currency="CZK" rate="34.917"/>
|
||||
<Cube currency="DKK" rate="7.4495"/>
|
||||
<Cube currency="EEK" rate="15.6466"/>
|
||||
<Cube currency="GBP" rate="0.7122"/>
|
||||
<Cube currency="HUF" rate="250.8"/>
|
||||
<Cube currency="LTL" rate="4.7174"/>
|
||||
<Cube currency="LVL" rate="0.6657"/>
|
||||
<Cube currency="MTL" rate="0.4432"/>
|
||||
<Cube currency="PLN" rate="4.0245"/>
|
||||
<Cube currency="ROL" rate="13168"/>
|
||||
<Cube currency="SEK" rate="9.4025"/>
|
||||
<Cube currency="SIT" rate="188.775"/>
|
||||
<Cube currency="SKK" rate="42.848"/>
|
||||
<Cube currency="CHF" rate="1.6123"/>
|
||||
<Cube currency="ISK" rate="81.53"/>
|
||||
<Cube currency="NOK" rate="8.7745"/>
|
||||
<Cube currency="TRL" rate="372816"/>
|
||||
<Cube currency="AUD" rate="1.8944"/>
|
||||
<Cube currency="CAD" rate="1.7965"/>
|
||||
<Cube currency="HKD" rate="9.1341"/>
|
||||
<Cube currency="KRW" rate="1373.01"/>
|
||||
<Cube currency="NZD" rate="2.2011"/>
|
||||
<Cube currency="SGD" rate="1.9655"/>
|
||||
<Cube currency="ZAR" rate="6.7975"/>
|
||||
</Cube>
|
||||
<Cube time="1999-01-04">
|
||||
<Cube currency="USD" rate="1.1789"/>
|
||||
<Cube currency="JPY" rate="133.73"/>
|
||||
<Cube currency="CYP" rate="0.58231"/>
|
||||
<Cube currency="CZK" rate="35.107"/>
|
||||
<Cube currency="DKK" rate="7.4501"/>
|
||||
<Cube currency="EEK" rate="15.6466"/>
|
||||
<Cube currency="GBP" rate="0.7111"/>
|
||||
<Cube currency="HUF" rate="251.48"/>
|
||||
<Cube currency="LTL" rate="4.717"/>
|
||||
<Cube currency="LVL" rate="0.6668"/>
|
||||
<Cube currency="MTL" rate="0.4432"/>
|
||||
<Cube currency="PLN" rate="4.0712"/>
|
||||
<Cube currency="ROL" rate="13111"/>
|
||||
<Cube currency="SEK" rate="9.4696"/>
|
||||
<Cube currency="SIT" rate="189.045"/>
|
||||
<Cube currency="SKK" rate="42.991"/>
|
||||
<Cube currency="CHF" rate="1.6168"/>
|
||||
<Cube currency="ISK" rate="81.48"/>
|
||||
<Cube currency="NOK" rate="8.855"/>
|
||||
<Cube currency="TRL" rate="372274"/>
|
||||
<Cube currency="AUD" rate="1.91"/>
|
||||
<Cube currency="CAD" rate="1.8004"/>
|
||||
<Cube currency="HKD" rate="9.1332"/>
|
||||
<Cube currency="KRW" rate="1398.59"/>
|
||||
<Cube currency="NZD" rate="2.2229"/>
|
||||
<Cube currency="SGD" rate="1.9554"/>
|
||||
<Cube currency="ZAR" rate="6.9358"/>
|
||||
</Cube>
|
||||
</Cube>
|
||||
</gesmes:Envelope>
|
258 tests/pricehist/sources/test_yahoo.py Normal file
@@ -0,0 +1,258 @@
import logging
import os
from datetime import datetime, timezone
from decimal import Decimal
from pathlib import Path

import pytest
import requests
import responses

from pricehist import exceptions
from pricehist.price import Price
from pricehist.series import Series
from pricehist.sources.yahoo import Yahoo


def timestamp(date):
    return int(
        datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
    )


@pytest.fixture
def src():
    return Yahoo()


@pytest.fixture
def type(src):
    return src.types()[0]


@pytest.fixture
def requests_mock():
    with responses.RequestsMock() as mock:
        yield mock


def url(base):
    return f"https://query1.finance.yahoo.com/v8/finance/chart/{base}"


@pytest.fixture
def recent_ok(requests_mock):
    json = (Path(os.path.splitext(__file__)[0]) / "tsla-recent.json").read_text()
    requests_mock.add(responses.GET, url("TSLA"), body=json, status=200)
    yield requests_mock


@pytest.fixture
def long_ok(requests_mock):
    json = (Path(os.path.splitext(__file__)[0]) / "ibm-long-partial.json").read_text()
    requests_mock.add(responses.GET, url("IBM"), body=json, status=200)
    yield requests_mock


@pytest.fixture
def with_null_ok(requests_mock):
    json = (Path(os.path.splitext(__file__)[0]) / "inrx-with-null.json").read_text()
    requests_mock.add(responses.GET, url("INR=X"), body=json, status=200)
    yield requests_mock


def test_normalizesymbol(src):
    assert src.normalizesymbol("tsla") == "TSLA"


def test_metadata(src):
    assert isinstance(src.id(), str)
    assert len(src.id()) > 0

    assert isinstance(src.name(), str)
    assert len(src.name()) > 0

    assert isinstance(src.description(), str)
    assert len(src.description()) > 0

    assert isinstance(src.source_url(), str)
    assert src.source_url().startswith("http")

    assert datetime.strptime(src.start(), "%Y-%m-%d")

    assert isinstance(src.types(), list)
    assert len(src.types()) > 0
    assert isinstance(src.types()[0], str)
    assert len(src.types()[0]) > 0

    assert isinstance(src.notes(), str)


def test_symbols(src, caplog):
    with caplog.at_level(logging.INFO):
        symbols = src.symbols()
    assert symbols == []
    assert any(["Find the symbol of interest on" in r.message for r in caplog.records])


def test_fetch_known(src, type, recent_ok):
    series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    req = recent_ok.calls[0].request
    assert req.params["events"] == "capitalGain%7Cdiv%7Csplit"
    assert req.params["includeAdjustedClose"] == "true"
    assert (series.base, series.quote) == ("TSLA", "USD")
    assert len(series.prices) == 5


def test_fetch_requests_and_receives_correct_times(src, type, recent_ok):
    series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    req = recent_ok.calls[0].request
    assert req.params["period1"] == str(timestamp("2021-01-04"))
    assert req.params["period2"] == str(timestamp("2021-01-09"))  # rounded up one
    assert req.params["interval"] == "1d"
    assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
    assert series.prices[-1] == Price("2021-01-08", Decimal("293.339996337890625"))


def test_fetch_ignores_any_extra_row(src, type, recent_ok):
    series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-07"))
    assert series.prices[0] == Price("2021-01-04", Decimal("243.2566680908203125"))
    assert series.prices[-1] == Price("2021-01-07", Decimal("272.013336181640625"))


def test_fetch_requests_logged(src, type, recent_ok, caplog):
    with caplog.at_level(logging.DEBUG):
        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    logged_requests = 0
    for r in caplog.records:
        if r.levelname == "DEBUG" and "curl " in r.message:
            logged_requests += 1
    assert logged_requests == 1


def test_fetch_types_all_available(src, recent_ok):
    adj = src.fetch(Series("TSLA", "", "adjclose", "2021-01-04", "2021-01-08"))
    opn = src.fetch(Series("TSLA", "", "open", "2021-01-04", "2021-01-08"))
    hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08"))
    low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08"))
    cls = src.fetch(Series("TSLA", "", "close", "2021-01-04", "2021-01-08"))
    mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08"))
    assert adj.prices[0].amount == Decimal("243.2566680908203125")
    assert opn.prices[0].amount == Decimal("239.82000732421875")
    assert hgh.prices[0].amount == Decimal("248.163330078125")
    assert low.prices[0].amount == Decimal("239.0633392333984375")
    assert cls.prices[0].amount == Decimal("243.2566680908203125")
    assert mid.prices[0].amount == Decimal("243.61333465576171875")


def test_fetch_type_mid_is_mean_of_low_and_high(src, recent_ok):
    mid = src.fetch(Series("TSLA", "", "mid", "2021-01-04", "2021-01-08")).prices
    hgh = src.fetch(Series("TSLA", "", "high", "2021-01-04", "2021-01-08")).prices
    low = src.fetch(Series("TSLA", "", "low", "2021-01-04", "2021-01-08")).prices
    assert all(
        [
            mid[i].amount == (sum([low[i].amount, hgh[i].amount]) / 2)
            for i in range(0, 5)
        ]
    )


def test_fetch_from_before_start(src, type, long_ok):
    series = src.fetch(Series("IBM", "", type, "1900-01-01", "2021-01-08"))
    assert series.prices[0] == Price("1962-01-02", Decimal("1.5133211612701416015625"))
    assert series.prices[-1] == Price("2021-01-08", Decimal("103.2923736572265625"))
    assert len(series.prices) > 9


def test_fetch_skips_dates_with_nulls(src, type, with_null_ok):
    series = src.fetch(Series("INR=X", "", type, "2017-07-10", "2017-07-12"))
    assert series.prices[0] == Price("2017-07-10", Decimal("64.61170196533203125"))
    assert series.prices[1] == Price("2017-07-12", Decimal("64.52559661865234375"))
    assert len(series.prices) == 2


def test_fetch_to_future(src, type, recent_ok):
    series = src.fetch(Series("TSLA", "", type, "2021-01-04", "2100-01-08"))
    assert len(series.prices) > 0


def test_fetch_no_data_in_past(src, type, requests_mock):
    requests_mock.add(
        responses.GET,
        url("TSLA"),
        status=400,
        body=(
            "400 Bad Request: Data doesn't exist for "
            "startDate = 1262304000, endDate = 1262995200"
        ),
    )
    with pytest.raises(exceptions.BadResponse) as e:
        src.fetch(Series("TSLA", "", type, "2010-01-04", "2010-01-08"))
    assert "No data for the given interval" in str(e.value)


def test_fetch_no_data_in_future(src, type, requests_mock):
    requests_mock.add(
        responses.GET,
        url("TSLA"),
        status=400,
        body=(
            "400 Bad Request: Data doesn't exist for "
            "startDate = 1893715200, endDate = 1894147200"
        ),
    )
    with pytest.raises(exceptions.BadResponse) as e:
        src.fetch(Series("TSLA", "", type, "2030-01-04", "2030-01-08"))
    assert "No data for the given interval" in str(e.value)


def test_fetch_no_data_on_weekend(src, type, requests_mock):
    requests_mock.add(
        responses.GET,
        url("TSLA"),
        status=404,
        body="404 Not Found: Timestamp data missing.",
    )
    with pytest.raises(exceptions.BadResponse) as e:
        src.fetch(Series("TSLA", "", type, "2021-01-09", "2021-01-10"))
    assert "may be for a gap in the data" in str(e.value)


def test_fetch_bad_sym(src, type, requests_mock):
    requests_mock.add(
        responses.GET,
        url("NOTABASE"),
        status=404,
        body="404 Not Found: No data found, symbol may be delisted",
    )
    with pytest.raises(exceptions.InvalidPair) as e:
        src.fetch(Series("NOTABASE", "", type, "2021-01-04", "2021-01-08"))
    assert "Symbol not found" in str(e.value)


def test_fetch_giving_quote(src, type):
    with pytest.raises(exceptions.InvalidPair) as e:
        src.fetch(Series("TSLA", "USD", type, "2021-01-04", "2021-01-08"))
    assert "quote currency" in str(e.value)


def test_fetch_network_issue(src, type, requests_mock):
    body = requests.exceptions.ConnectionError("Network issue")
    requests_mock.add(responses.GET, url("TSLA"), body=body)
    with pytest.raises(exceptions.RequestError) as e:
        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    assert "Network issue" in str(e.value)


def test_fetch_bad_status(src, type, requests_mock):
    requests_mock.add(responses.GET, url("TSLA"), status=500, body="Some other reason")
    with pytest.raises(exceptions.BadResponse) as e:
        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    assert "Internal Server Error" in str(e.value)


def test_fetch_parsing_error(src, type, requests_mock):
    requests_mock.add(responses.GET, url("TSLA"), body="")
    with pytest.raises(exceptions.ResponseParsingError) as e:
        src.fetch(Series("TSLA", "", type, "2021-01-04", "2021-01-08"))
    assert "error occurred while parsing data from the source" in str(e.value)
249 tests/pricehist/sources/test_yahoo/ibm-long-partial.json Normal file
@@ -0,0 +1,249 @@
{
|
||||
"chart": {
|
||||
"result": [
|
||||
{
|
||||
"meta": {
|
||||
"currency": "USD",
|
||||
"symbol": "IBM",
|
||||
"exchangeName": "NYQ",
|
||||
"fullExchangeName": "NYSE",
|
||||
"instrumentType": "EQUITY",
|
||||
"firstTradeDate": -252322200,
|
||||
"regularMarketTime": 1726257602,
|
||||
"hasPrePostMarketData": true,
|
||||
"gmtoffset": -14400,
|
||||
"timezone": "EDT",
|
||||
"exchangeTimezoneName": "America/New_York",
|
||||
"regularMarketPrice": 214.79,
|
||||
"fiftyTwoWeekHigh": 216.08,
|
||||
"fiftyTwoWeekLow": 212.13,
|
||||
"regularMarketDayHigh": 216.08,
|
||||
"regularMarketDayLow": 212.13,
|
||||
"regularMarketVolume": 4553547,
|
||||
"longName": "International Business Machines Corporation",
|
||||
"shortName": "International Business Machines",
|
||||
"chartPreviousClose": 7.291,
|
||||
"priceHint": 2,
|
||||
"currentTradingPeriod": {
|
||||
"pre": {
|
||||
"timezone": "EDT",
|
||||
"end": 1726234200,
|
||||
"start": 1726214400,
|
||||
"gmtoffset": -14400
|
||||
},
|
||||
"regular": {
|
||||
"timezone": "EDT",
|
||||
"end": 1726257600,
|
||||
"start": 1726234200,
|
||||
"gmtoffset": -14400
|
||||
},
|
||||
"post": {
|
||||
"timezone": "EDT",
|
||||
"end": 1726272000,
|
||||
"start": 1726257600,
|
||||
"gmtoffset": -14400
|
||||
}
|
||||
},
|
||||
"dataGranularity": "1d",
|
||||
"range": "",
|
||||
"validRanges": [
|
||||
"1d",
|
||||
"5d",
|
||||
"1mo",
|
||||
"3mo",
|
||||
"6mo",
|
||||
"1y",
|
||||
"2y",
|
||||
"5y",
|
||||
"10y",
|
||||
"ytd",
|
||||
"max"
|
||||
]
|
||||
},
|
||||
"timestamp": [
|
||||
-252322200,
|
||||
-252235800,
|
||||
-252149400,
|
||||
-252063000,
|
||||
-251803800,
|
||||
1609770600,
|
||||
1609857000,
|
||||
1609943400,
|
||||
1610029800,
|
||||
1610116200
|
||||
],
|
||||
"events": {
|
||||
"dividends": {
|
||||
"-249298200": {
|
||||
"amount": 0.000956,
|
||||
"date": -249298200
|
||||
},
|
||||
"-241439400": {
|
||||
"amount": 0.000956,
|
||||
"date": -241439400
|
||||
},
|
||||
"-233577000": {
|
||||
"amount": 0.000956,
|
||||
"date": -233577000
|
||||
},
|
||||
"-225797400": {
|
||||
"amount": 0.000956,
|
||||
"date": -225797400
|
||||
},
|
||||
"-217848600": {
|
||||
"amount": 0.001275,
|
||||
"date": -217848600
|
||||
},
|
||||
"1573137000": {
|
||||
"amount": 1.548757,
|
||||
"date": 1573137000
|
||||
},
|
||||
"1581085800": {
|
||||
"amount": 1.548757,
|
||||
"date": 1581085800
|
||||
},
|
||||
"1588858200": {
|
||||
"amount": 1.558317,
|
||||
"date": 1588858200
|
||||
},
|
||||
"1596807000": {
|
||||
"amount": 1.558317,
|
||||
"date": 1596807000
|
||||
},
|
||||
"1604932200": {
|
||||
"amount": 1.558317,
|
||||
"date": 1604932200
|
||||
}
|
||||
},
|
||||
"splits": {
|
||||
"-177417000": {
|
||||
"date": -177417000,
|
||||
"numerator": 5.0,
|
||||
"denominator": 4.0,
|
||||
"splitRatio": "5:4"
|
||||
},
|
||||
"-114345000": {
|
||||
"date": -114345000,
|
||||
"numerator": 3.0,
|
||||
"denominator": 2.0,
|
||||
"splitRatio": "3:2"
|
||||
},
|
||||
"-53343000": {
|
||||
"date": -53343000,
|
||||
"numerator": 2.0,
|
||||
"denominator": 1.0,
|
||||
"splitRatio": "2:1"
|
||||
},
|
||||
"107530200": {
|
||||
"date": 107530200,
|
||||
"numerator": 5.0,
|
||||
"denominator": 4.0,
|
||||
"splitRatio": "5:4"
|
||||
},
|
||||
"297091800": {
|
||||
"date": 297091800,
|
||||
"numerator": 4.0,
|
||||
"denominator": 1.0,
|
||||
"splitRatio": "4:1"
|
||||
},
|
||||
"864826200": {
|
||||
"date": 864826200,
|
||||
"numerator": 2.0,
|
||||
"denominator": 1.0,
|
||||
"splitRatio": "2:1"
|
||||
},
|
||||
"927811800": {
|
||||
"date": 927811800,
|
||||
"numerator": 2.0,
|
||||
"denominator": 1.0,
|
||||
"splitRatio": "2:1"
|
||||
}
|
||||
}
|
||||
},
|
||||
"indicators": {
|
||||
"quote": [
|
||||
{
|
||||
"close": [
|
||||
7.2912678718566895,
|
||||
7.3550028800964355,
|
||||
7.281707763671875,
|
||||
7.138305187225342,
|
||||
7.00446081161499,
|
||||
118.48948669433594,
|
||||
120.59273529052734,
|
||||
123.60420989990234,
|
||||
123.31739807128906,
|
||||
122.87763214111328
|
||||
],
|
||||
"low": [
|
||||
7.2912678718566895,
|
||||
7.2912678718566895,
|
||||
7.2785210609436035,
|
||||
7.125557899475098,
|
||||
6.9471001625061035,
|
||||
117.62906646728516,
|
||||
119.13002014160156,
|
||||
121.14722442626953,
|
||||
122.61949920654297,
|
||||
121.39579010009766
|
||||
],
|
||||
"open": [
|
||||
7.374124050140381,
|
||||
7.2912678718566895,
|
||||
7.3550028800964355,
|
||||
7.272148132324219,
|
||||
7.131930828094482,
|
||||
120.31549072265625,
|
||||
119.5124282836914,
|
||||
121.3193130493164,
|
||||
124.32122039794922,
|
||||
122.9158706665039
|
||||
],
|
||||
"high": [
|
||||
7.374124050140381,
|
||||
7.3550028800964355,
|
||||
7.3550028800964355,
|
||||
7.272148132324219,
|
||||
7.131930828094482,
|
||||
120.38240814208984,
|
||||
121.1089859008789,
|
||||
126.08030700683594,
|
||||
124.7227554321289,
|
||||
123.63288879394531
|
||||
],
|
||||
"volume": [
|
||||
407940,
|
||||
305955,
|
||||
274575,
|
||||
384405,
|
||||
572685,
|
||||
5417443,
|
||||
6395872,
|
||||
8322708,
|
||||
4714740,
|
||||
4891305
|
||||
]
|
||||
}
|
||||
],
|
||||
"adjclose": [
|
||||
{
|
||||
"adjclose": [
|
||||
1.5133211612701416,
|
||||
1.5265485048294067,
|
||||
1.5113375186920166,
|
||||
1.4815733432769775,
|
||||
1.4537923336029053,
|
||||
99.60364532470703,
|
||||
101.37164306640625,
|
||||
103.90313720703125,
|
||||
103.66202545166016,
|
||||
103.29237365722656
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"error": null
|
||||
}
|
||||
}
|
119 tests/pricehist/sources/test_yahoo/inrx-with-null.json Normal file
@@ -0,0 +1,119 @@
{
|
||||
"chart": {
|
||||
"result": [
|
||||
{
|
||||
"meta": {
|
||||
"currency": "INR",
|
||||
"symbol": "INR=X",
|
||||
"exchangeName": "CCY",
|
||||
"fullExchangeName": "CCY",
|
||||
"instrumentType": "CURRENCY",
|
||||
"firstTradeDate": 1070236800,
|
||||
"regularMarketTime": 1726284616,
|
||||
"hasPrePostMarketData": false,
|
||||
"gmtoffset": 3600,
|
||||
"timezone": "BST",
|
||||
"exchangeTimezoneName": "Europe/London",
|
||||
"regularMarketPrice": 83.89,
|
||||
"fiftyTwoWeekHigh": 83.89,
|
||||
"fiftyTwoWeekLow": 83.89,
|
||||
"regularMarketDayHigh": 83.89,
|
||||
"regularMarketDayLow": 83.89,
|
||||
"regularMarketVolume": 0,
|
||||
"longName": "USD/INR",
|
||||
"shortName": "USD/INR",
|
||||
"chartPreviousClose": 64.6117,
|
||||
"priceHint": 4,
|
||||
"currentTradingPeriod": {
|
||||
"pre": {
|
||||
"timezone": "BST",
|
||||
"start": 1726182000,
|
||||
"end": 1726182000,
|
||||
"gmtoffset": 3600
|
||||
},
|
||||
"regular": {
|
||||
"timezone": "BST",
|
||||
"start": 1726182000,
|
||||
"end": 1726268340,
|
||||
"gmtoffset": 3600
|
||||
},
|
||||
"post": {
|
||||
"timezone": "BST",
|
||||
"start": 1726268340,
|
||||
"end": 1726268340,
|
||||
"gmtoffset": 3600
|
||||
}
|
||||
},
|
||||
"dataGranularity": "1d",
|
||||
"range": "",
|
||||
"validRanges": [
|
||||
"1d",
|
||||
"5d",
|
||||
"1mo",
|
||||
"3mo",
|
||||
"6mo",
|
||||
"1y",
|
||||
"2y",
|
||||
"5y",
|
||||
"10y",
|
||||
"ytd",
|
||||
"max"
|
||||
]
|
||||
},
|
||||
"timestamp": [
|
||||
1499641200,
|
||||
1499727600,
|
||||
1499814000,
|
||||
1499900400
|
||||
],
|
||||
"indicators": {
|
||||
"quote": [
|
||||
{
|
||||
"open": [
|
||||
64.6155014038086,
|
||||
null,
|
||||
64.55549621582031,
|
||||
64.46800231933594
|
||||
],
|
||||
"volume": [
|
||||
0,
|
||||
null,
|
||||
0,
|
||||
0
|
||||
],
|
||||
"low": [
|
||||
64.41000366210938,
|
||||
null,
|
||||
64.3499984741211,
|
||||
64.33999633789062
|
||||
],
|
||||
"close": [
|
||||
64.61170196533203,
|
||||
null,
|
||||
64.52559661865234,
|
||||
64.36499786376953
|
||||
],
|
||||
"high": [
|
||||
64.6155014038086,
|
||||
null,
|
||||
64.56999969482422,
|
||||
64.48419952392578
|
||||
]
|
||||
}
|
||||
],
|
||||
"adjclose": [
|
||||
{
|
||||
"adjclose": [
|
||||
64.61170196533203,
|
||||
null,
|
||||
64.52559661865234,
|
||||
64.36499786376953
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"error": null
|
||||
}
|
||||
}
|
126 tests/pricehist/sources/test_yahoo/tsla-recent.json Normal file
@@ -0,0 +1,126 @@
{
|
||||
"chart": {
|
||||
"result": [
|
||||
{
|
||||
"meta": {
|
||||
"currency": "USD",
|
||||
"symbol": "TSLA",
|
||||
"exchangeName": "NMS",
|
||||
"fullExchangeName": "NasdaqGS",
|
||||
"instrumentType": "EQUITY",
|
||||
"firstTradeDate": 1277818200,
|
||||
"regularMarketTime": 1726257600,
|
||||
"hasPrePostMarketData": true,
|
||||
"gmtoffset": -14400,
|
||||
"timezone": "EDT",
|
||||
"exchangeTimezoneName": "America/New_York",
|
||||
"regularMarketPrice": 230.29,
|
||||
"fiftyTwoWeekHigh": 232.664,
|
||||
"fiftyTwoWeekLow": 226.32,
|
||||
"regularMarketDayHigh": 232.664,
|
||||
"regularMarketDayLow": 226.32,
|
||||
"regularMarketVolume": 59096538,
|
||||
"longName": "Tesla, Inc.",
|
||||
"shortName": "Tesla, Inc.",
|
||||
"chartPreviousClose": 235.223,
|
||||
"priceHint": 2,
|
||||
"currentTradingPeriod": {
|
||||
"pre": {
|
||||
"timezone": "EDT",
|
||||
"start": 1726214400,
|
||||
"end": 1726234200,
|
||||
"gmtoffset": -14400
|
||||
},
|
||||
"regular": {
|
||||
"timezone": "EDT",
|
||||
"start": 1726234200,
|
||||
"end": 1726257600,
|
||||
"gmtoffset": -14400
|
||||
},
|
||||
"post": {
|
||||
"timezone": "EDT",
|
||||
"start": 1726257600,
|
||||
"end": 1726272000,
|
||||
"gmtoffset": -14400
|
||||
}
|
||||
},
|
||||
"dataGranularity": "1d",
|
||||
"range": "",
|
||||
"validRanges": [
|
||||
"1d",
|
||||
"5d",
|
||||
"1mo",
|
||||
"3mo",
|
||||
"6mo",
|
||||
"1y",
|
||||
"2y",
|
||||
"5y",
|
||||
"10y",
|
||||
"ytd",
|
||||
"max"
|
||||
]
|
||||
},
|
||||
"timestamp": [
|
||||
1609770600,
|
||||
1609857000,
|
||||
1609943400,
|
||||
1610029800,
|
||||
1610116200
|
||||
],
|
||||
"indicators": {
|
||||
"quote": [
|
||||
{
|
||||
"open": [
|
||||
239.82000732421875,
|
||||
241.22000122070312,
|
||||
252.8300018310547,
|
||||
259.2099914550781,
|
||||
285.3333435058594
|
||||
],
|
||||
"close": [
|
||||
243.2566680908203,
|
||||
245.0366668701172,
|
||||
251.9933319091797,
|
||||
272.0133361816406,
|
||||
293.3399963378906
|
||||
],
|
||||
"high": [
|
||||
248.163330078125,
|
||||
246.94667053222656,
|
||||
258.0,
|
||||
272.3299865722656,
|
||||
294.8299865722656
|
||||
],
|
||||
"low": [
|
||||
239.06333923339844,
|
||||
239.73333740234375,
|
||||
249.6999969482422,
|
||||
258.3999938964844,
|
||||
279.46331787109375
|
||||
],
|
||||
"volume": [
|
||||
145914600,
|
||||
96735600,
|
||||
134100000,
|
||||
154496700,
|
||||
225166500
|
||||
]
|
||||
}
|
||||
],
|
||||
"adjclose": [
|
||||
{
|
||||
"adjclose": [
|
||||
243.2566680908203,
|
||||
245.0366668701172,
|
||||
251.9933319091797,
|
||||
272.0133361816406,
|
||||
293.3399963378906
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"error": null
|
||||
}
|
||||
}
|
141 tests/pricehist/test_beanprice.py Normal file
@@ -0,0 +1,141 @@
import importlib
from datetime import date, datetime, timedelta, timezone
from decimal import Decimal

import pytest

from pricehist import beanprice, exceptions, sources
from pricehist.price import Price
from pricehist.series import Series


@pytest.fixture
def series():
    series = Series(
        "BTC",
        "USD",
        "high",
        "2021-01-01",
        "2021-01-03",
        prices=[
            Price("2021-01-01", Decimal("1.1")),
            Price("2021-01-02", Decimal("1.2")),
            Price("2021-01-03", Decimal("1.3")),
        ],
    )
    return series


@pytest.fixture
def pricehist_source(mocker, series):
    mock = mocker.MagicMock()
    mock.types = mocker.MagicMock(return_value=["close", "high", "low"])
    mock.fetch = mocker.MagicMock(return_value=series)
    return mock


@pytest.fixture
def source(pricehist_source):
    return beanprice.source(pricehist_source)()


@pytest.fixture
def ltz():
    return datetime.now(timezone.utc).astimezone().tzinfo


def test_get_prices_series(pricehist_source, source, ltz):
    ticker = "BTC:USD:high"
    begin = datetime(2021, 1, 1, tzinfo=ltz)
    end = datetime(2021, 1, 3, tzinfo=ltz)
    result = source.get_prices_series(ticker, begin, end)

    pricehist_source.fetch.assert_called_once_with(
        Series("BTC", "USD", "high", "2021-01-01", "2021-01-03")
    )

    assert result == [
        beanprice.SourcePrice(Decimal("1.1"), datetime(2021, 1, 1, tzinfo=ltz), "USD"),
        beanprice.SourcePrice(Decimal("1.2"), datetime(2021, 1, 2, tzinfo=ltz), "USD"),
        beanprice.SourcePrice(Decimal("1.3"), datetime(2021, 1, 3, tzinfo=ltz), "USD"),
    ]


def test_get_prices_series_exception(pricehist_source, source, ltz, mocker):
    pricehist_source.fetch = mocker.MagicMock(
        side_effect=exceptions.RequestError("Message")
    )
    ticker = "_5eDJI::low"
    begin = datetime(2021, 1, 1, tzinfo=ltz)
    end = datetime(2021, 1, 3, tzinfo=ltz)
    result = source.get_prices_series(ticker, begin, end)
    assert result is None


def test_get_prices_series_special_chars(pricehist_source, source, ltz):
    ticker = "_5eDJI::low"
    begin = datetime(2021, 1, 1, tzinfo=ltz)
    end = datetime(2021, 1, 3, tzinfo=ltz)
    source.get_prices_series(ticker, begin, end)
    pricehist_source.fetch.assert_called_once_with(
        Series("^DJI", "", "low", "2021-01-01", "2021-01-03")
    )


def test_get_prices_series_price_type(pricehist_source, source, ltz):
    ticker = "TSLA"
    begin = datetime(2021, 1, 1, tzinfo=ltz)
    end = datetime(2021, 1, 3, tzinfo=ltz)
    source.get_prices_series(ticker, begin, end)
    pricehist_source.fetch.assert_called_once_with(
        Series("TSLA", "", "close", "2021-01-01", "2021-01-03")
    )


def test_get_historical_price(pricehist_source, source, ltz):
    ticker = "BTC:USD:high"
    time = datetime(2021, 1, 3, tzinfo=ltz)
    result = source.get_historical_price(ticker, time)
    pricehist_source.fetch.assert_called_once_with(
        Series("BTC", "USD", "high", "2021-01-03", "2021-01-03")
    )
    assert result == beanprice.SourcePrice(
        Decimal("1.3"), datetime(2021, 1, 3, tzinfo=ltz), "USD"
    )


def test_get_historical_price_none_available(pricehist_source, source, ltz, mocker):
    pricehist_source.fetch = mocker.MagicMock(
        return_value=Series("BTC", "USD", "high", "2021-01-03", "2021-01-03", prices=[])
    )
    ticker = "BTC:USD:high"
    time = datetime(2021, 1, 3, tzinfo=ltz)
    result = source.get_historical_price(ticker, time)
    assert result is None


def test_get_latest_price(pricehist_source, source, ltz):
    ticker = "BTC:USD:high"
    start = datetime.combine((date.today() - timedelta(days=7)), datetime.min.time())
    today = datetime.combine(date.today(), datetime.min.time())
    result = source.get_latest_price(ticker)
    pricehist_source.fetch.assert_called_once_with(
        Series("BTC", "USD", "high", start.date().isoformat(), today.date().isoformat())
    )
    assert result == beanprice.SourcePrice(
        Decimal("1.3"), datetime(2021, 1, 3, tzinfo=ltz), "USD"
    )


def test_get_latest_price_none_available(pricehist_source, source, ltz, mocker):
    pricehist_source.fetch = mocker.MagicMock(
        return_value=Series("BTC", "USD", "high", "2021-01-01", "2021-01-03", prices=[])
    )
    ticker = "BTC:USD:high"
    result = source.get_latest_price(ticker)
    assert result is None


def test_all_sources_available_for_beanprice():
    for identifier in sources.by_id.keys():
        importlib.import_module(f"pricehist.beanprice.{identifier}").Source()
166 tests/pricehist/test_cli.py Normal file
@@ -0,0 +1,166 @@
import argparse

import pytest

from pricehist import __version__, cli, sources


def w(string):
    return string.split(" ")


def test_valid_pair():
    assert cli.valid_pair("BTC/AUD") == ("BTC", "AUD")
    assert cli.valid_pair("BTC/AUD/ignored") == ("BTC", "AUD")
    assert cli.valid_pair("SYM") == ("SYM", "")
    assert cli.valid_pair("SYM/") == ("SYM", "")
    with pytest.raises(argparse.ArgumentTypeError):
        cli.valid_pair("/SYM")
    with pytest.raises(argparse.ArgumentTypeError):
        cli.valid_pair("")


def test_valid_date():
    assert cli.valid_date("today") == cli.today()
    assert cli.valid_date("2021-12-30") == "2021-12-30"
    with pytest.raises(argparse.ArgumentTypeError) as e:
        cli.valid_date("2021-12-40")
    assert "Not a valid" in str(e.value)


def test_valid_date_before():
    assert cli.valid_date_before("2021-12-30") == "2021-12-29"
    with pytest.raises(argparse.ArgumentTypeError) as e:
        cli.valid_date_before("2021-12-40")
    assert "Not a valid" in str(e.value)


def test_valid_date_after():
    assert cli.valid_date_after("2021-12-30") == "2021-12-31"
    with pytest.raises(argparse.ArgumentTypeError) as e:
        cli.valid_date_after("2021-12-40")
    assert "Not a valid" in str(e.value)


def test_valid_char():
    assert cli.valid_char(",") == ","
    with pytest.raises(argparse.ArgumentTypeError):
        cli.valid_char("")
    with pytest.raises(argparse.ArgumentTypeError):
        cli.valid_char("12")


def test_cli_no_args_shows_usage(capfd):
    cli.cli(w("pricehist"))
    out, err = capfd.readouterr()
    assert "usage: pricehist" in out
    assert "optional arguments:" in out or "options:" in out
    assert "commands:" in out


def test_cli_help_shows_usage_and_exits(capfd):
    with pytest.raises(SystemExit) as e:
        cli.cli(w("pricehist -h"))
    assert e.value.code == 0
    out, err = capfd.readouterr()
    assert "usage: pricehist" in out
    assert "optional arguments:" in out or "options:" in out
    assert "commands:" in out


def test_cli_verbose(capfd, mocker):
    cli.cli(w("pricehist --verbose"))
    out, err = capfd.readouterr()
    assert "Ended pricehist run at" in err


def test_cli_version(capfd):
    cli.cli(w("pricehist --version"))
    out, err = capfd.readouterr()
    assert f"pricehist {__version__}\n" == out


def test_cli_sources(capfd):
    cli.cli(w("pricehist sources"))
    out, err = capfd.readouterr()
    for source_id in sources.by_id.keys():
        assert source_id in out


def test_cli_source(capfd):
    expected = sources.by_id["ecb"].format_info() + "\n"
    cli.cli(w("pricehist source ecb"))
    out, err = capfd.readouterr()
    assert out == expected


def test_cli_source_symbols(capfd, mocker):
    sources.by_id["ecb"].symbols = mocker.MagicMock(
        return_value=[("EUR/AUD", "Euro against Australian Dollar")]
    )
    cli.cli(w("pricehist source ecb --symbols"))
    out, err = capfd.readouterr()
    assert out == "EUR/AUD Euro against Australian Dollar\n"


def test_cli_source_search(capfd, mocker):
    sources.by_id["alphavantage"].search = mocker.MagicMock(
        return_value=[("TSLA", "Tesla Inc, Equity, United States, USD")]
    )
    cli.cli(w("pricehist source alphavantage --search TSLA"))
    out, err = capfd.readouterr()
    assert out == "TSLA Tesla Inc, Equity, United States, USD\n"


def test_cli_source_fetch(capfd, mocker):
    formatted_result = "P 2021-01-01 00:00:00 BTC 24139.4648 EUR\n"
    cli.fetch = mocker.MagicMock(return_value=formatted_result)
    argv = w("pricehist fetch coindesk BTC/EUR -s 2021-01-01 -e 2021-01-01 -o ledger")
    cli.cli(argv)
    out, err = capfd.readouterr()
    assert out == formatted_result


def test_cli_source_fetch_invalid_start(capfd, mocker):
    argv = w("pricehist fetch coindesk BTC/EUR -s 2021-01-01 -e 2020-12-01")
    with pytest.raises(SystemExit) as e:
        cli.cli(argv)
    assert e.value.code != 0
    out, err = capfd.readouterr()
    assert "end date '2020-12-01' preceeds the start date" in err


def test_cli_source_fetch_invalid_type(capfd, mocker):
    argv = w("pricehist fetch coindesk BTC/EUR -t notype")
    with pytest.raises(SystemExit) as e:
        cli.cli(argv)
    assert e.value.code != 0
    out, err = capfd.readouterr()
    assert "price type 'notype' is not recognized" in err


def test_cli_source_fetch_sets_source_defaults(mocker):
    cli.fetch = mocker.MagicMock(return_value="")
    cli.cli(w("pricehist fetch coindesk BTC/EUR"))
    captured_series = cli.fetch.call_args.args[0]
    assert captured_series.start == sources.by_id["coindesk"].start()
    assert captured_series.type == sources.by_id["coindesk"].types()[0]


def test_cli_source_fetch_normalizes_symbols(mocker):
    cli.fetch = mocker.MagicMock(return_value="")
    cli.cli(w("pricehist fetch coindesk btc/eur"))
    captured_series = cli.fetch.call_args.args[0]
    assert captured_series.base == "BTC"
    assert captured_series.quote == "EUR"


def test_cli_source_fetch_handles_brokenpipeerror(caplog, mocker):
    cli.fetch = mocker.MagicMock(side_effect=BrokenPipeError())
    cli.cli(w("pricehist fetch coindesk BTC/EUR --verbose"))
    assert any(
        [
            "DEBUG" == r.levelname and "output pipe was closed early" in r.message
            for r in caplog.records
        ]
    )
42 tests/pricehist/test_exceptions.py Normal file
@@ -0,0 +1,42 @@
import logging

import pytest

from pricehist import exceptions


def test_handler_logs_debug_information(caplog):
    with caplog.at_level(logging.DEBUG):
        try:
            with exceptions.handler():
                raise exceptions.RequestError("Some message")
        except SystemExit:
            pass

    assert caplog.records[0].levelname == "DEBUG"
    assert "exception encountered" in caplog.records[0].message
    assert caplog.records[0].exc_info


def test_handler_exits_nonzero(caplog):
    with pytest.raises(SystemExit) as e:
        with exceptions.handler():
            raise exceptions.RequestError("Some message")

    assert e.value.code == 1


def test_handler_logs_critical_information(caplog):
    with caplog.at_level(logging.CRITICAL):
        try:
            with exceptions.handler():
                raise exceptions.RequestError("Some message")
        except SystemExit:
            pass

    assert any(
        [
            "CRITICAL" == r.levelname and "Some message" in r.message
            for r in caplog.records
        ]
    )
199 tests/pricehist/test_fetch.py Normal file
@@ -0,0 +1,199 @@
import logging
from datetime import date, timedelta
from decimal import Decimal

import pytest

from pricehist import exceptions
from pricehist.fetch import fetch
from pricehist.format import Format
from pricehist.price import Price
from pricehist.series import Series
from pricehist.sources.basesource import BaseSource


@pytest.fixture
def res_series(mocker):
    series = mocker.MagicMock()
    series.start = "2021-01-01"
    series.end = "2021-01-03"
    return series


@pytest.fixture
def source(res_series, mocker):
    source = mocker.MagicMock(BaseSource)
    source.start = mocker.MagicMock(return_value="2021-01-01")
    source.fetch = mocker.MagicMock(return_value=res_series)
    return source


@pytest.fixture
def output(mocker):
    output = mocker.MagicMock()
    output.format = mocker.MagicMock(return_value="")
    return output


@pytest.fixture
def fmt():
    return Format()


def test_fetch_warns_if_start_before_source_start(source, output, fmt, mocker, caplog):
    req_series = Series("BTC", "EUR", "close", "2020-12-31", "2021-01-03")
    source.start = mocker.MagicMock(return_value="2021-01-01")
    with caplog.at_level(logging.INFO):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    assert any(
        [
            "WARNING" == r.levelname and "start date 2020-12-31 preceeds" in r.message
            for r in caplog.records
        ]
    )


def test_fetch_returns_formatted_output(source, res_series, output, fmt, mocker):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    output.format = mocker.MagicMock(return_value="rendered output")

    result = fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)

    output.format.assert_called_once_with(res_series, source, fmt=fmt)
    assert result == "rendered output"


def test_fetch_inverts_if_requested(source, res_series, output, fmt, mocker):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    inv_series = mocker.MagicMock()
    res_series.invert = mocker.MagicMock(return_value=inv_series)

    fetch(req_series, source, output, invert=True, quantize=None, fmt=fmt)

    res_series.invert.assert_called_once_with()
    output.format.assert_called_once_with(inv_series, source, fmt=fmt)


def test_fetch_quantizes_if_requested(source, res_series, output, fmt, mocker):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    qnt_series = mocker.MagicMock()
    res_series.quantize = mocker.MagicMock(return_value=qnt_series)

    fetch(req_series, source, output, invert=False, quantize=2, fmt=fmt)

    res_series.quantize.assert_called_once_with(2)
    output.format.assert_called_once_with(qnt_series, source, fmt=fmt)


def test_fetch_warns_if_no_data(source, res_series, output, fmt, mocker, caplog):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    res_series.prices = mocker.MagicMock(return_value=[])
    with caplog.at_level(logging.INFO):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    assert any(
        [
            "WARNING" == r.levelname and "No data found" in r.message
            for r in caplog.records
        ]
    )


def test_fetch_warns_if_missing_data_at_start(source, res_series, output, fmt, caplog):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    res_series.prices = [
        Price("2021-01-02", Decimal("1.2")),
        Price("2021-01-03", Decimal("1.3")),
    ]
    with caplog.at_level(logging.INFO):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    r = caplog.records[0]
    assert r.levelname == "WARNING"
    assert r.message == (
        "Available data covers the interval [2021-01-02--2021-01-03], "
        "which starts 1 day later than requested."
    )


def test_fetch_warns_if_missing_data_at_end(source, res_series, output, fmt, caplog):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    res_series.prices = [Price("2021-01-01", Decimal("1.1"))]
    with caplog.at_level(logging.INFO):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    r = caplog.records[0]
    assert r.levelname == "WARNING"
    assert r.message == (
        "Available data covers the interval [2021-01-01--2021-01-01], "
        "which ends 2 days earlier than requested."
    )


def test_fetch_warns_if_missing_data_at_both_ends(
    source, res_series, output, fmt, caplog
):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    res_series.prices = [Price("2021-01-02", Decimal("1.2"))]
    with caplog.at_level(logging.INFO):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    r = caplog.records[0]
    assert r.levelname == "WARNING"
    assert r.message == (
        "Available data covers the interval [2021-01-02--2021-01-02], "
        "which starts 1 day later and ends 1 day earlier than requested."
    )


def test_fetch_debug_not_warning_message_if_only_today_missing(
    source, res_series, output, fmt, caplog
):
    start = (date.today() - timedelta(days=2)).isoformat()
    yesterday = (date.today() - timedelta(days=1)).isoformat()
    today = date.today().isoformat()
    req_series = Series("BTC", "EUR", "close", start, today)
    res_series.start = start
    res_series.end = today
    res_series.prices = [Price(start, Decimal("1.1")), Price(yesterday, Decimal("1.2"))]
    with caplog.at_level(logging.DEBUG):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    r = caplog.records[0]
    assert r.levelname == "DEBUG"
    assert r.message == (
        f"Available data covers the interval [{start}--{yesterday}], "
        "which ends 1 day earlier than requested."
    )


def test_fetch_debug_not_warning_message_if_as_requested(
    source, res_series, output, fmt, caplog
):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")
    res_series.prices = [
        Price("2021-01-01", Decimal("1.1")),
        Price("2021-01-02", Decimal("1.2")),
        Price("2021-01-03", Decimal("1.3")),
    ]
    with caplog.at_level(logging.DEBUG):
        fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)
    r = caplog.records[0]
    assert r.levelname == "DEBUG"
    assert r.message == (
        "Available data covers the interval [2021-01-01--2021-01-03], as requested."
    )


def test_fetch_handles_source_exceptions(source, output, fmt, mocker, caplog):
    req_series = Series("BTC", "EUR", "close", "2021-01-01", "2021-01-03")

    def side_effect(_):
        raise exceptions.RequestError("something strange")

    source.fetch = mocker.MagicMock(side_effect=side_effect)

    with caplog.at_level(logging.INFO):
        with pytest.raises(SystemExit) as e:
            fetch(req_series, source, output, invert=False, quantize=None, fmt=fmt)

    r = caplog.records[0]
    assert r.levelname == "CRITICAL"
    assert "something strange" in r.message

    assert e.value.code == 1
58 tests/pricehist/test_format.py Normal file
@@ -0,0 +1,58 @@
from collections import namedtuple
|
||||
from decimal import Decimal
|
||||
|
||||
from pricehist.format import Format
|
||||
|
||||
|
||||
def test_fromargs():
|
||||
arg_values = {
|
||||
"formatquote": None,
|
||||
"formattime": "23:59:59",
|
||||
"formatdecimal": None,
|
||||
"formatthousands": None,
|
||||
"formatsymbol": None,
|
||||
"formatdatesep": None,
|
||||
"formatcsvdelim": None,
|
||||
"formatbase": None,
|
||||
"formatjsonnums": None,
|
||||
}
|
||||
args = namedtuple("args", arg_values.keys())(**arg_values)
|
||||
fmt = Format.fromargs(args)
|
||||
assert fmt.time == "23:59:59"
|
||||
assert fmt.symbol == "rightspace"
|
||||
|
||||
|
||||
def test_format_date():
|
||||
assert Format().format_date("2021-01-01") == "2021-01-01"
|
||||
assert Format(datesep="/").format_date("2021-01-01") == "2021/01/01"
|
||||
|
||||
|
||||
def test_format_quote_amount():
|
||||
assert (
|
||||
Format(decimal=",").format_quote_amount("USD", Decimal("1234.5678"))
|
||||
== "1234,5678 USD"
|
||||
)
|
||||
assert (
|
||||
Format(symbol="rightspace").format_quote_amount("USD", Decimal("1234.5678"))
|
||||
== "1234.5678 USD"
|
||||
)
|
||||
assert (
|
||||
Format(symbol="right").format_quote_amount("€", Decimal("1234.5678"))
|
||||
== "1234.5678€"
|
||||
)
|
||||
assert (
|
||||
Format(symbol="leftspace").format_quote_amount("£", Decimal("1234.5678"))
|
||||
== "£ 1234.5678"
|
||||
)
|
||||
assert (
|
||||
Format(symbol="left").format_quote_amount("$", Decimal("1234.5678"))
|
||||
== "$1234.5678"
|
||||
)
|
||||
|
||||
|
||||
def test_format_num():
|
||||
assert Format().format_num(Decimal("1234.5678")) == "1234.5678"
|
||||
assert (
|
||||
Format(decimal=",", thousands=".").format_num(Decimal("1234.5678"))
|
||||
== "1.234,5678"
|
||||
)
|
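For orientation, a stripped-down Format-like class consistent with the assertions above could look as follows. This is a hedged sketch under assumed defaults (symbol "rightspace", decimal ".", empty thousands separator, datesep "-"); the real pricehist.format.Format has more fields and behaviour.

from dataclasses import dataclass
from decimal import Decimal


@dataclass(frozen=True)
class SketchFormat:
    # Assumed defaults, inferred from the test expectations above.
    symbol: str = "rightspace"
    decimal: str = "."
    thousands: str = ""
    datesep: str = "-"

    def format_date(self, date: str) -> str:
        return date.replace("-", self.datesep)

    def format_num(self, num: Decimal) -> str:
        # Group with "," first, then swap in the configured separators.
        whole_and_frac = f"{num:,}".split(".")
        whole_and_frac[0] = whole_and_frac[0].replace(",", self.thousands)
        return self.decimal.join(whole_and_frac)

    def format_quote_amount(self, quote: str, amount: Decimal) -> str:
        num = self.format_num(amount)
        if self.symbol == "rightspace":
            return f"{num} {quote}"
        elif self.symbol == "right":
            return f"{num}{quote}"
        elif self.symbol == "leftspace":
            return f"{quote} {num}"
        else:  # "left"
            return f"{quote}{num}"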
tests/pricehist/test_isocurrencies.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from datetime import datetime

from pricehist import isocurrencies


def test_current():
    currency = isocurrencies.by_code()["EUR"]
    assert currency.code == "EUR"
    assert currency.number == 978
    assert currency.minor_units == 2
    assert currency.name == "Euro"
    assert "GERMANY" in currency.countries
    assert "FRANCE" in currency.countries
    assert not currency.is_fund
    assert not currency.historical
    assert not currency.withdrawal_date


def test_historical():
    currency = isocurrencies.by_code()["DEM"]
    assert currency.code == "DEM"
    assert currency.number == 276
    assert currency.minor_units is None
    assert currency.name == "Deutsche Mark"
    assert "GERMANY" in currency.countries
    assert not currency.is_fund
    assert currency.historical
    assert currency.withdrawal_date == "2002-03"


def test_data_dates():
    assert datetime.strptime(isocurrencies.current_data_date(), "%Y-%m-%d")
    assert datetime.strptime(isocurrencies.historical_data_date(), "%Y-%m-%d")
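Usage note: as exercised above, isocurrencies.by_code() returns a mapping keyed by ISO 4217 alphabetic code, so a lookup is a plain dictionary access. A minimal example, with the output values taken from the assertions above:

from pricehist import isocurrencies

eur = isocurrencies.by_code()["EUR"]
print(eur.name, eur.minor_units)  # Euro 2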
tests/pricehist/test_logger.py (new file, 74 lines)
@@ -0,0 +1,74 @@
import logging
import sys

from pricehist import logger


class Record:
    pass


def test_formatter_no_prefix_for_info():
    record = Record()
    record.levelno = logging.INFO
    record.levelname = "INFO"
    record.msg = "A message %s"
    record.args = "for you"
    record.exc_info = None
    record.exc_text = ""

    s = logger.Formatter().format(record)

    assert s == "A message for you"


def test_formatter_prefix_for_other_levels():
    record = Record()
    record.levelno = logging.WARNING
    record.levelname = "WARNING"
    record.msg = "A warning %s"
    record.args = "for you"
    record.exc_info = None
    record.exc_text = ""

    s = logger.Formatter().format(record)

    assert s == "WARNING A warning for you"


def test_formatter_formats_given_exception():

    try:
        raise Exception("Something happened")
    except Exception:
        exc_info = sys.exc_info()

    record = Record()
    record.levelno = logging.DEBUG
    record.levelname = "DEBUG"
    record.msg = "An exception %s:"
    record.args = "for you"
    record.exc_info = exc_info
    record.exc_text = ""

    s = logger.Formatter().format(record)
    lines = s.splitlines()

    assert "DEBUG An exception for you:" in lines
    assert "DEBUG Traceback (most recent call last):" in lines
    assert any('DEBUG File "' in line for line in lines)
    assert "DEBUG Exception: Something happened" in lines


def test_init_sets_dest_formatter_and_level(capfd):
    logger.init()
    logging.info("Test message")
    out, err = capfd.readouterr()
    assert "Test message" not in out
    assert "Test message" in err.splitlines()
    assert logging.root.level == logging.INFO


def test_show_debug():
    logger.show_debug()
    assert logging.root.level == logging.DEBUG
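The behaviour these tests assert is: INFO messages are emitted bare, every other level gets its level name prefixed to each output line, and traceback lines from exc_info are prefixed too. A rough sketch of such a formatter is below; it is only an illustration of that contract, not the actual pricehist.logger.Formatter.

import logging


class SketchFormatter(logging.Formatter):
    # Illustration only; pricehist.logger.Formatter may differ internally.
    def format(self, record):
        message = record.msg % record.args
        if record.exc_info and not record.exc_text:
            record.exc_text = self.formatException(record.exc_info)
        if record.exc_text:
            message += "\n" + record.exc_text
        if record.levelno == logging.INFO:
            return message  # INFO lines are printed without a prefix
        # every other level gets "LEVELNAME " prefixed to each output line
        return "\n".join(
            f"{record.levelname} {line}" for line in message.splitlines()
        )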
tests/pricehist/test_series.py (new file, 95 lines)
@@ -0,0 +1,95 @@
from dataclasses import replace
from decimal import Decimal

import pytest

from pricehist.price import Price
from pricehist.series import Series


@pytest.fixture
def series():
    return Series(
        "BASE",
        "QUOTE",
        "type",
        "2021-01-01",
        "2021-06-30",
        [
            Price("2021-01-01", Decimal("1.0123456789")),
            Price("2021-01-02", Decimal("2.01234567890123456789")),
            Price("2021-01-03", Decimal("3.012345678901234567890123456789")),
        ],
    )


def test_invert(series):
    result = series.invert()
    assert (series.base, series.quote) == ("BASE", "QUOTE")
    assert (result.base, result.quote) == ("QUOTE", "BASE")


def test_rename_base(series):
    result = series.rename_base("NEWBASE")
    assert series.base == "BASE"
    assert result.base == "NEWBASE"


def test_rename_quote(series):
    result = series.rename_quote("NEWQUOTE")
    assert series.quote == "QUOTE"
    assert result.quote == "NEWQUOTE"


def test_quantize_rounds_half_even(series):
    subject = replace(
        series,
        prices=[
            Price("2021-01-01", Decimal("1.14")),
            Price("2021-01-02", Decimal("2.25")),
            Price("2021-01-03", Decimal("3.35")),
            Price("2021-01-04", Decimal("4.46")),
        ],
    )
    amounts = [p.amount for p in subject.quantize(1).prices]
    assert amounts == [
        Decimal("1.1"),
        Decimal("2.2"),
        Decimal("3.4"),
        Decimal("4.5"),
    ]


def test_quantize_does_not_extend(series):
    subject = replace(
        series,
        prices=[
            Price("2021-01-01", Decimal("1.14")),
            Price("2021-01-02", Decimal("2.25")),
            Price("2021-01-03", Decimal("3.35")),
            Price("2021-01-04", Decimal("4.46")),
        ],
    )
    amounts = [p.amount for p in subject.quantize(3).prices]
    assert amounts == [
        Decimal("1.14"),
        Decimal("2.25"),
        Decimal("3.35"),
        Decimal("4.46"),
    ]


def test_quantize_does_not_go_beyond_context_max_prec(series):
    subject = replace(
        series,
        prices=[
            Price("2021-01-01", Decimal("1.012345678901234567890123456789")),
        ],
    )
    assert subject.prices[0].amount == Decimal("1.012345678901234567890123456789")
    result0 = subject.quantize(26)
    result1 = subject.quantize(27)
    result2 = subject.quantize(35)
    assert result0.prices[0].amount == Decimal("1.01234567890123456789012346")
    assert result1.prices[0].amount == Decimal("1.012345678901234567890123457")
    assert result2.prices[0].amount == Decimal("1.012345678901234567890123457")
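The quantize tests fix three properties: rounding is half-even, quantizing never adds decimal places, and the result never exceeds what the current Decimal context precision can represent. A hedged sketch of a per-amount helper with those properties follows; it is not necessarily how Series implements quantization, and quantize_amount is a name introduced here for illustration.

from decimal import ROUND_HALF_EVEN, Decimal, getcontext


def quantize_amount(amount: Decimal, decimals: int) -> Decimal:
    # Sketch only: clamp the requested decimal places to the amount's current
    # scale and to what the context precision allows, then round half-even.
    digits = amount.as_tuple().digits
    exponent = amount.as_tuple().exponent
    fractional = -exponent                    # decimal places currently present
    whole = len(digits) - fractional          # digits before the decimal point
    max_decimals = getcontext().prec - whole  # most places the context can hold
    target = min(decimals, fractional, max_decimals)
    return amount.quantize(Decimal(1).scaleb(-target), rounding=ROUND_HALF_EVEN)

Mapping such a helper over the prices with dataclasses.replace, as the fixture-based tests do, reproduces the expected amounts above.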
tests/pricehist/test_sources.py (new file, 16 lines)
@@ -0,0 +1,16 @@
import re

from pricehist import sources


def test_formatted_includes_ecb():
    lines = sources.formatted().splitlines()
    assert any(re.match(r"ecb +European Central Bank", line) for line in lines)


def test_formatted_names_aligned():
    lines = sources.formatted().splitlines()
    offsets = [len(re.match(r"(\w+ +)[^ ]", line)[1]) for line in lines]
    first = offsets[0]
    assert first > 1
    assert all(offset == first for offset in offsets)
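The alignment test implies that sources.formatted() pads each source identifier to a common column before its description. A hypothetical sketch of such padding is below; the function name and the input mapping are assumptions for illustration, not pricehist's API.

def formatted_sketch(descriptions):
    # descriptions: assumed {source_id: human-readable name},
    # e.g. {"ecb": "European Central Bank"}
    width = max(len(key) for key in descriptions) + 2
    return "\n".join(
        f"{key.ljust(width)}{text}" for key, text in sorted(descriptions.items())
    )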
@@ -1,5 +0,0 @@
from pricehist import __version__


def test_version():
    assert __version__ == "0.1.0"
tox.ini (new file, 9 lines)
@@ -0,0 +1,9 @@
[tox]
isolated_build = True
envlist = py38,py39

[testenv]
deps = poetry
commands =
    poetry install
    poetry run make test
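Usage note: with this tox.ini, running tox from the project root builds the package in isolation and runs the suite under Python 3.8 and 3.9, installing the project with poetry and then invoking poetry run make test in each environment.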