Fix UNIX timestamp logic to avoid timezone issues for Yahoo and CoinMarketCap.
This commit is contained in:
parent
ab0c0bdca7
commit
d506b8502e
2 changed files with 23 additions and 6 deletions
|
@ -1,6 +1,6 @@
|
|||
import dataclasses
|
||||
import json
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
@ -77,10 +77,19 @@ class CoinMarketCap(BaseSource):
|
|||
params["convert"] = series.quote
|
||||
|
||||
params["time_start"] = int(
|
||||
int(datetime.strptime(series.start, "%Y-%m-%d").timestamp())
|
||||
int(
|
||||
datetime.strptime(series.start, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
)
|
||||
)
|
||||
params["time_end"] = (
|
||||
int(datetime.strptime(series.end, "%Y-%m-%d").timestamp()) + 24 * 60 * 60
|
||||
int(
|
||||
datetime.strptime(series.end, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
)
|
||||
+ 24 * 60 * 60
|
||||
) # round up to include the last day
|
||||
|
||||
response = self.log_curl(requests.get(url, params=params))
|
||||
|
|
|
@ -2,7 +2,7 @@ import csv
|
|||
import dataclasses
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal
|
||||
|
||||
import requests
|
||||
|
@ -102,8 +102,16 @@ class Yahoo(BaseSource):
|
|||
)
|
||||
spark = json.loads(spark_response.content)
|
||||
|
||||
start_ts = int(datetime.strptime(series.start, "%Y-%m-%d").timestamp())
|
||||
end_ts = int(datetime.strptime(series.end, "%Y-%m-%d").timestamp()) + (
|
||||
start_ts = int(
|
||||
datetime.strptime(series.start, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
)
|
||||
end_ts = int(
|
||||
datetime.strptime(series.end, "%Y-%m-%d")
|
||||
.replace(tzinfo=timezone.utc)
|
||||
.timestamp()
|
||||
) + (
|
||||
24 * 60 * 60
|
||||
) # round up to include the last day
|
||||
|
||||
|
|
Loading…
Add table
Reference in a new issue