2018-09-06 16:18:45 +02:00
|
|
|
import asyncio
|
2015-09-05 22:47:01 +09:00
|
|
|
from datetime import datetime
|
2015-09-04 22:36:52 +09:00
|
|
|
import inspect
|
|
|
|
|
import sys
|
2018-02-10 14:38:06 +01:00
|
|
|
import os
|
|
|
|
|
import json
|
2014-03-31 21:52:31 -04:00
|
|
|
import time
|
2015-10-15 14:56:23 -07:00
|
|
|
import csv
|
2018-01-28 01:56:26 +01:00
|
|
|
import decimal
|
2013-09-23 16:14:28 +02:00
|
|
|
from decimal import Decimal
|
2025-04-24 09:19:07 +02:00
|
|
|
from typing import Sequence, Optional, Mapping, Dict, Union, Tuple
|
2015-02-27 23:10:45 +01:00
|
|
|
|
2025-04-24 09:19:07 +02:00
|
|
|
from aiorpcx.curio import timeout_after, ignore_after
|
2020-11-20 01:45:11 +01:00
|
|
|
import aiohttp
|
2019-02-27 21:48:33 +01:00
|
|
|
|
2020-04-14 16:12:47 +02:00
|
|
|
from . import util
|
2017-01-22 21:25:24 +03:00
|
|
|
from .bitcoin import COIN
|
|
|
|
|
from .i18n import _
|
2025-04-24 09:19:07 +02:00
|
|
|
from .util import (
|
|
|
|
|
ThreadJob, make_dir, log_exceptions, OldTaskGroup, make_aiohttp_session, resource_path, EventListener,
|
|
|
|
|
event_listener, to_decimal, timestamp_to_datetime
|
|
|
|
|
)
|
exchange_rate: try harder to refresh quote when cache is expiring
Previously we polled every 2.5 minutes to get the fx spot price,
and had a 10 minute cache expiry during which the latest spot price
was valid.
On Android, this often resulted in having no price available (showing
"No data" in GUI) when putting the app in the foreground after e.g.
a half-hour sleep in the background: often there would be no fx price
until the next tick, which could take 2.5 minutes. (btw in some cases
I saw the application trying to get new quotes from the network as
soon as the app was put in the foreground but it seems those happened
so fast that the network was not ready yet and DNS lookups failed)
Now we make the behaviour a bit more complex: we still fetch the price
every 2.5 mins, and the cache is still valid for 10 mins, however if
the last price is >7.5 mins old, we become more aggressive and go into
an exponential backoff, initially trying a request every few seconds.
For the Android scenario, this means there might be "No data" for fx
for a few seconds after a long sleep, however if there is a working
network, it should soon get a fresh fx spot price quote.
2023-06-14 15:42:28 +00:00
|
|
|
from .util import NetworkRetryManager
|
2018-09-07 11:34:56 +02:00
|
|
|
from .network import Network
|
2018-10-22 16:41:25 +02:00
|
|
|
from .simple_config import SimpleConfig
|
2019-04-26 18:52:26 +02:00
|
|
|
from .logging import Logger
|
2018-10-22 16:41:25 +02:00
|
|
|
|
2013-09-23 16:14:28 +02:00
|
|
|
|
2015-09-06 18:06:56 +09:00
|
|
|
# See https://en.wikipedia.org/wiki/ISO_4217
# Number of decimal places to use when formatting an amount in a given
# currency. Currencies not listed here default elsewhere (typically 2).
CCY_PRECISIONS = {'BHD': 3, 'BIF': 0, 'BYR': 0, 'CLF': 4, 'CLP': 0,
                  'CVE': 0, 'DJF': 0, 'GNF': 0, 'IQD': 3, 'ISK': 0,
                  'JOD': 3, 'JPY': 0, 'KMF': 0, 'KRW': 0, 'KWD': 3,
                  'LYD': 3, 'MGA': 1, 'MRO': 1, 'OMR': 3, 'PYG': 0,
                  'RWF': 0, 'TND': 3, 'UGX': 0, 'UYI': 0, 'VND': 0,
                  'VUV': 0, 'XAF': 0, 'XAU': 4, 'XOF': 0, 'XPF': 0,
                  # Cryptocurrencies
                  'BTC': 8, 'LTC': 6, 'XRP': 4, 'ETH': 8,
                  }

# Spot-quote cache tuning (all values in seconds):
SPOT_RATE_REFRESH_TARGET = 150  # approx. every 2.5 minutes, try to refresh spot price
SPOT_RATE_CLOSE_TO_STALE = 450  # try harder to fetch an update if price is getting old
SPOT_RATE_EXPIRY = 600  # spot price becomes stale after 10 minutes -> we no longer show/use it
|
2022-08-24 13:00:41 +00:00
|
|
|
|
|
|
|
|
|
2019-04-26 18:52:26 +02:00
|
|
|
class ExchangeBase(Logger):
    """Base class for fiat exchange-rate providers.

    Subclasses implement `get_rates` (spot quotes) and, when the provider
    supports it, `history_ccys`/`request_history` (daily historical rates).
    Fetched data is cached in memory (`_quotes`, `_history`); history is
    additionally cached on disk, one JSON file per (exchange, ccy) pair.
    """

    def __init__(self, on_quotes, on_history):
        Logger.__init__(self)
        # per-ccy historical rates keyed by 'YYYY-MM-DD' date strings,
        # plus one special 'timestamp' key holding a float (see below)
        self._history = {}  # type: Dict[str, Dict[str, str | float]]
        # latest spot quotes, keyed by ccy code
        self._quotes = {}  # type: Dict[str, Optional[Decimal]]
        # time.time() of the last successful quote refresh
        self._quotes_timestamp = 0  # type: Union[int, float]
        self.on_quotes = on_quotes      # callback invoked after a quote fetch attempt
        self.on_history = on_history    # callback invoked after history is updated

    async def get_raw(self, site, get_string):
        """GET `https://<site><get_string>` and return the response body as text."""
        # APIs must have https
        url = ''.join(['https://', site, get_string])
        network = Network.get_instance()
        # use the app-wide proxy (if any) for all fx requests
        proxy = network.proxy if network else None
        async with make_aiohttp_session(proxy) as session:
            async with session.get(url) as response:
                response.raise_for_status()
                return await response.text()

    async def get_json(self, site, get_string):
        """GET `https://<site><get_string>` and return the parsed JSON body."""
        # APIs must have https
        url = ''.join(['https://', site, get_string])
        network = Network.get_instance()
        proxy = network.proxy if network else None
        async with make_aiohttp_session(proxy) as session:
            async with session.get(url) as response:
                response.raise_for_status()
                # set content_type to None to disable checking MIME type
                return await response.json(content_type=None)

    async def get_csv(self, site, get_string):
        """GET a CSV document and return it as a list of row dicts."""
        raw = await self.get_raw(site, get_string)
        reader = csv.DictReader(raw.split('\n'))
        return list(reader)

    def name(self):
        # the exchange is identified by its class name (also used in cache filenames)
        return self.__class__.__name__

    async def update_safe(self, ccy: str) -> None:
        """Fetch spot quotes for `ccy`; never raises.

        On success, updates the quote cache/timestamp and notifies via
        `on_quotes(received_new_data=True)`. On failure, the stale cache and
        old timestamp are kept and `on_quotes()` is still called.
        """
        try:
            self.logger.info(f"getting fx quotes for {ccy}")
            self._quotes = await self.get_rates(ccy)
            assert all(isinstance(rate, (Decimal, type(None))) for rate in self._quotes.values()), \
                f"fx rate must be Decimal, got {self._quotes}"
        except (aiohttp.ClientError, asyncio.TimeoutError, OSError) as e:
            # expected network-ish failures (incl. proxy errors) -> short log line
            self.logger.info(f"failed fx quotes: {repr(e)}")
            self.on_quotes()
        except Exception as e:
            # unexpected failure -> log full traceback
            self.logger.exception(f"failed fx quotes: {repr(e)}")
            self.on_quotes()
        else:
            self.logger.debug("received fx quotes")
            self._quotes_timestamp = time.time()
            self.on_quotes(received_new_data=True)

    @staticmethod
    def _read_historical_rates_from_file(
            *, exchange_name: str, ccy: str, cache_dir: str,
    ) -> Tuple[Optional[Dict[str, str]], Optional[float]]:
        """Read the on-disk history cache for (exchange, ccy).

        Returns (history, file_mtime), or (None, None) if the cache file is
        missing, unreadable, or empty.
        """
        filename = os.path.join(cache_dir, f"{exchange_name}_{ccy}")
        if not os.path.exists(filename):
            return None, None
        timestamp = os.stat(filename).st_mtime
        try:
            with open(filename, 'r', encoding='utf-8') as f:
                h = json.loads(f.read())
        except Exception:
            # corrupted/partial file -> behave as if there were no cache
            return None, None
        if not h:  # e.g. empty dict
            return None, None
        # cast rates to str
        h = {date_str: str(rate) for (date_str, rate) in h.items()}
        return h, timestamp

    def read_historical_rates(self, ccy: str, cache_dir: str) -> Optional[dict]:
        """Load disk-cached history for `ccy` into memory and notify listeners.

        Returns the history dict (with a 'timestamp' entry added), or None.
        """
        h, timestamp = self._read_historical_rates_from_file(
            exchange_name=self.name(),
            ccy=ccy,
            cache_dir=cache_dir,
        )
        if not h:
            return None
        assert timestamp is not None
        h['timestamp'] = timestamp
        self._history[ccy] = h
        self.on_history()
        return h

    @staticmethod
    def _write_historical_rates_to_file(
            *, exchange_name: str, ccy: str, cache_dir: str, history: Dict[str, str],
    ) -> None:
        """Write `history` (date-str -> rate-str) to the on-disk cache file."""
        # sanity check types of history dict
        assert 'timestamp' not in history
        for key, rate in history.items():
            assert isinstance(key, str), f"{exchange_name=}. {ccy=}. {key=!r}. {rate=!r}"
            assert isinstance(rate, str), f"{exchange_name=}. {ccy=}. {key=!r}. {rate=!r}"
        # write to file
        filename = os.path.join(cache_dir, f"{exchange_name}_{ccy}")
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(json.dumps(history, sort_keys=True))

    @log_exceptions
    async def get_historical_rates_safe(self, ccy: str, cache_dir: str) -> None:
        """Fetch fresh history for `ccy`, merge with the disk cache, and persist.

        Network failures are logged and swallowed; the cache stays untouched.
        """
        try:
            self.logger.info(f"requesting fx history for {ccy}")
            h_new = await self.request_history(ccy)
            self.logger.debug(f"received fx history for {ccy}")
        except (aiohttp.ClientError, asyncio.TimeoutError, OSError) as e:
            self.logger.info(f"failed fx history: {repr(e)}")
            return
        except Exception as e:
            self.logger.exception(f"failed fx history: {repr(e)}")
            return
        # cast rates to str
        h_new = {date_str: str(rate) for (date_str, rate) in h_new.items()}  # type: Dict[str, str]
        # merge old history and new history. resolve duplicate dates using new data.
        h_old, _timestamp = self._read_historical_rates_from_file(
            exchange_name=self.name(), ccy=ccy, cache_dir=cache_dir,
        )
        h_old = h_old or {}
        h = {**h_old, **h_new}
        # write merged data to disk cache
        self._write_historical_rates_to_file(
            exchange_name=self.name(), ccy=ccy, cache_dir=cache_dir, history=h,
        )
        h['timestamp'] = time.time()  # note: this is the only item in h that has a float value
        self._history[ccy] = h
        self.on_history()

    def get_historical_rates(self, ccy: str, cache_dir: str) -> None:
        """Ensure history for `ccy` is available; schedule a refresh if stale.

        Loads the disk cache if memory is empty; spawns an async network
        fetch when there is no data or it is older than 24 hours.
        """
        if ccy not in self.history_ccys():
            return
        h = self._history.get(ccy)
        if h is None:
            h = self.read_historical_rates(ccy, cache_dir)
        if h is None or h['timestamp'] < time.time() - 24*3600:
            util.get_asyncio_loop().create_task(self.get_historical_rates_safe(ccy, cache_dir))

    def history_ccys(self) -> Sequence[str]:
        # overridden by subclasses that support historical rates
        return []

    def historical_rate(self, ccy: str, d_t: datetime) -> Decimal:
        """Return the cached historical rate for `ccy` on day `d_t`, or NaN."""
        date_str = d_t.strftime('%Y-%m-%d')
        rate = self._history.get(ccy, {}).get(date_str) or 'NaN'
        try:
            return Decimal(rate)
        except Exception:  # guard against garbage coming from exchange
            #self.logger.debug(f"found corrupted historical_rate: {rate=!r}. for {ccy=} at {date_str}")
            return Decimal('NaN')

    async def request_history(self, ccy: str) -> Dict[str, Union[str, float]]:
        raise NotImplementedError()  # implemented by subclasses

    async def get_rates(self, ccy: str) -> Mapping[str, Optional[Decimal]]:
        raise NotImplementedError()  # implemented by subclasses

    async def get_currencies(self) -> Sequence[str]:
        """Return the sorted list of 3-letter ccy codes this exchange quotes."""
        rates = await self.get_rates('')
        return sorted([str(a) for (a, b) in rates.items() if b is not None and len(a)==3])

    def get_cached_spot_quote(self, ccy: str) -> Decimal:
        """Returns the cached exchange rate as a Decimal"""
        if ccy == 'BTC':
            # trivial rate: 1 BTC == 1 BTC
            return Decimal(1)
        rate = self._quotes.get(ccy)
        if rate is None:
            return Decimal('NaN')
        if self._quotes_timestamp + SPOT_RATE_EXPIRY < time.time():
            # Our rate is stale. Probably better to return no rate than an incorrect one.
            return Decimal('NaN')
        return Decimal(rate)
|
|
|
|
|
|
2025-04-24 09:03:50 +02:00
|
|
|
|
2022-09-23 15:27:00 -04:00
|
|
|
class Yadio(ExchangeBase):
    """BTC spot prices from the yadio.io public API."""

    async def get_currencies(self):
        # /currencies returns a mapping keyed by currency code
        response = await self.get_json('api.yadio.io', '/currencies')
        return [code for code in response]

    async def get_rates(self, ccy: str) -> Mapping[str, Optional[Decimal]]:
        response = await self.get_json('api.yadio.io', '/rate/%s/BTC' % ccy)
        rate = to_decimal(response['rate'])
        return {ccy: rate}
|
2019-03-22 16:55:57 +01:00
|
|
|
|
2025-04-24 09:03:50 +02:00
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class BitcoinAverage(ExchangeBase):
    # note: historical rates used to be freely available
    # but this is no longer the case. see #5188

    async def get_rates(self, ccy):
        response = await self.get_json('apiv2.bitcoinaverage.com', '/indices/global/ticker/short')
        # keys look like "BTCUSD"; strip the "BTC" part to get the ccy code
        return {key.replace("BTC", ""): to_decimal(response[key]['last'])
                for key in response if key != 'timestamp'}
|
|
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
class Bitcointoyou(ExchangeBase):
    """BRL spot price from the bitcointoyou.com ticker."""

    async def get_rates(self, ccy):
        response = await self.get_json('bitcointoyou.com', "/API/ticker.aspx")
        last_price = response['ticker']['last']
        return {'BRL': to_decimal(last_price)}
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class BitcoinVenezuela(ExchangeBase):
    """Spot prices and historical rates from api.bitcoinvenezuela.com."""

    async def get_rates(self, ccy):
        response = await self.get_json('api.bitcoinvenezuela.com', '/')
        btc_rates = response['BTC']
        # the feed reports NULL for some coins (e.g. LTC); skip those entries
        return {code: to_decimal(value)
                for code, value in btc_rates.items() if value is not None}

    def history_ccys(self):
        return ['ARS', 'EUR', 'USD', 'VEF']

    async def request_history(self, ccy):
        response = await self.get_json('api.bitcoinvenezuela.com', "/historical/index.php?coin=BTC")
        return response[f"{ccy}_BTC"]
|
2015-09-04 22:36:52 +09:00
|
|
|
|
|
|
|
|
|
2018-03-05 20:08:10 +09:00
|
|
|
class Bitbank(ExchangeBase):
    """JPY spot price from the bitbank.cc public ticker."""

    async def get_rates(self, ccy):
        response = await self.get_json('public.bitbank.cc', '/btc_jpy/ticker')
        last_price = response['data']['last']
        return {'JPY': to_decimal(last_price)}
|
2018-03-05 20:08:10 +09:00
|
|
|
|
|
|
|
|
|
2024-10-17 16:47:16 +00:00
|
|
|
class BitFinex(ExchangeBase):
    """Spot prices and daily-candle history from the Bitfinex v2 REST API."""

    async def get_currencies(self):
        response = await self.get_json(
            'api-pub.bitfinex.com',
            f"/v2/conf/pub:list:pair:exchange")
        # keep only 6-char "BTCXXX" pairs, stripping the "BTC" prefix
        return [pair[3:] for pair in response[0]
                if len(pair) == 6 and pair.startswith("BTC")]

    def history_ccys(self):
        return CURRENCIES[self.name()]

    async def get_rates(self, ccy):
        # ref https://docs.bitfinex.com/reference/rest-public-ticker
        ticker = await self.get_json(
            'api-pub.bitfinex.com',
            f"/v2/ticker/tBTC{ccy}")
        # index 6 of the ticker array is the last trade price (per the docs above)
        return {ccy: to_decimal(ticker[6])}

    async def request_history(self, ccy):
        # ref https://docs.bitfinex.com/reference/rest-public-candles
        candles = await self.get_json(
            'api.bitfinex.com',
            f"/v2/candles/trade:1D:tBTC{ccy}/hist?limit=10000")
        # candle[0] is the timestamp in milliseconds; candle[2] is used as the day's rate
        return {timestamp_to_datetime(candle[0] // 1000, utc=True).strftime('%Y-%m-%d'): str(candle[2])
                for candle in candles}
|
|
|
|
|
|
|
|
|
|
|
2018-03-05 20:08:10 +09:00
|
|
|
class BitFlyer(ExchangeBase):
    """JPY spot price from the bitflyer.jp price endpoint."""

    async def get_rates(self, ccy):
        response = await self.get_json('bitflyer.jp', '/api/echo/price')
        mid_price = response['mid']
        return {'JPY': to_decimal(mid_price)}
|
2018-03-05 20:08:10 +09:00
|
|
|
|
|
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class BitPay(ExchangeBase):
    """Spot prices for many fiat currencies from the bitpay.com rates API."""

    async def get_rates(self, ccy):
        response = await self.get_json('bitpay.com', '/api/rates')
        return {entry['code']: to_decimal(entry['rate']) for entry in response}
|
2015-09-04 22:36:52 +09:00
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
class Bitso(ExchangeBase):
    """MXN spot price from the api.bitso.com ticker."""

    async def get_rates(self, ccy):
        response = await self.get_json('api.bitso.com', '/v2/ticker')
        return {'MXN': to_decimal(response['last'])}
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
|
2015-10-15 13:10:15 -07:00
|
|
|
class BitStamp(ExchangeBase):
    """Spot prices and OHLC-based history from the Bitstamp REST API."""

    async def get_currencies(self):
        # ref https://www.bitstamp.net/api/#tag/Tickers/operation/GetCurrencyPairTickers
        json = await self.get_json(
            'www.bitstamp.net',
            f"/api/v2/ticker/")
        pairs = [ticker["pair"] for ticker in json]
        # keep only "BTC/XXX" pairs, then strip the "BTC/" prefix
        pairs = [pair for pair in pairs
                 if len(pair) == 7 and pair[:4] == "BTC/"]
        return [pair[4:] for pair in pairs]

    async def get_rates(self, ccy):
        # ref https://www.bitstamp.net/api/#tag/Tickers/operation/GetMarketTicker
        if ccy in CURRENCIES[self.name()]:
            json = await self.get_json('www.bitstamp.net', f'/api/v2/ticker/btc{ccy.lower()}/')
            return {ccy: to_decimal(json['last'])}
        # unsupported ccy -> no quote rather than an error
        return {}

    def history_ccys(self):
        return CURRENCIES[self.name()]

    async def request_history(self, ccy):
        """Fetch full daily OHLC history by paginating backwards concurrently.

        Requests are issued in parallel (1000 daily candles each) from now
        back to the earliest known data; each worker merges its page into a
        shared dict keyed by 'YYYY-MM-DD'.
        """
        # ref https://www.bitstamp.net/api/#tag/Market-info/operation/GetOHLCData
        merged_history = {}
        history_starts = 1313625600  # for BTCUSD pair (probably earliest)
        items_per_request = 1000
        step = 86400  # one day per candle

        async def populate_history(endtime: int):
            # fetch one page of candles ending at `endtime` and merge it in
            history = await self.get_json(
                'www.bitstamp.net',
                f"/api/v2/ohlc/btc{ccy.lower()}/?step={step}&limit={items_per_request}&end={endtime}")
            history = dict([
                (timestamp_to_datetime(int(h["timestamp"]), utc=True).strftime('%Y-%m-%d'), str(h["close"]))
                for h in history["data"]["ohlc"]])
            merged_history.update(history)

        async with OldTaskGroup() as group:
            endtime = int(time.time())
            while True:
                if endtime < history_starts:
                    break
                await group.spawn(populate_history(endtime=endtime))
                # step back one full page worth of candles
                endtime = endtime - items_per_request * step
        return merged_history
|
|
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
class Bitvalor(ExchangeBase):
    """BRL spot price from the api.bitvalor.com aggregate ticker."""

    async def get_rates(self, ccy):
        response = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        total = response['ticker_1h']['total']
        return {'BRL': to_decimal(total['last'])}
|
2017-06-05 13:05:47 +04:00
|
|
|
|
|
|
|
|
|
2015-09-06 21:40:00 +09:00
|
|
|
class BlockchainInfo(ExchangeBase):
    """Spot prices for all currencies offered by the blockchain.info ticker."""

    async def get_rates(self, ccy):
        tickers = await self.get_json('blockchain.info', '/ticker')
        return {code: to_decimal(info['15m']) for code, info in tickers.items()}
|
2015-09-04 22:36:52 +09:00
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
2019-01-17 00:51:15 +01:00
|
|
|
class Bylls(ExchangeBase):
    """CAD spot price from Bylls."""

    async def get_rates(self, ccy):
        data = await self.get_json('bylls.com', '/api/price?from_currency=BTC&to_currency=CAD')
        price = data['public_price']['to_price']
        return {'CAD': to_decimal(price)}
|
2019-01-17 00:51:15 +01:00
|
|
|
|
|
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class Coinbase(ExchangeBase):
    """Spot prices for all currencies from the Coinbase exchange-rates endpoint."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.coinbase.com',
                                   '/v2/exchange-rates?currency=BTC')
        rates = data["data"]["rates"]
        return {code: to_decimal(rate) for code, rate in rates.items()}
|
2015-09-04 22:36:52 +09:00
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
2019-03-22 16:35:02 +01:00
|
|
|
class CoinCap(ExchangeBase):
    """USD spot price and daily history from CoinCap."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.coincap.io', '/v2/rates/bitcoin/')
        return {'USD': to_decimal(data['data']['rateUsd'])}

    def history_ccys(self):
        return ['USD']

    async def request_history(self, ccy):
        # Currently 2000 days is the maximum in 1 API call
        # (and history starts on 2017-03-23)
        candles = await self.get_json('api.coincap.io',
                                      '/v2/assets/bitcoin/history?interval=d1&limit=2000')
        return {
            timestamp_to_datetime(item['time'] / 1000, utc=True).strftime('%Y-%m-%d'): str(item['priceUsd'])
            for item in candles['data']
        }
|
|
|
|
|
|
|
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class CoinDesk(ExchangeBase):
    """Spot prices and daily close history from the CoinDesk BPI API."""

    async def get_currencies(self):
        entries = await self.get_json('api.coindesk.com',
                                      '/v1/bpi/supported-currencies.json')
        return [entry['currency'] for entry in entries]

    async def get_rates(self, ccy):
        data = await self.get_json('api.coindesk.com',
                                   '/v1/bpi/currentprice/%s.json' % ccy)
        return {ccy: to_decimal(data['bpi'][ccy]['rate_float'])}

    def history_starts(self):
        # earliest date for which the API has daily data, per currency
        return {'USD': '2012-11-30', 'EUR': '2013-09-01'}

    def history_ccys(self):
        return self.history_starts().keys()

    async def request_history(self, ccy):
        start = self.history_starts()[ccy]
        end = datetime.today().strftime('%Y-%m-%d')
        # Note ?currency and ?index don't work as documented. Sigh.
        query = '/v1/bpi/historical/close.json?start=%s&end=%s' % (start, end)
        data = await self.get_json('api.coindesk.com', query)
        return data['bpi']
|
2016-01-24 11:16:05 +09:00
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
2019-03-22 16:35:02 +01:00
|
|
|
class CoinGecko(ExchangeBase):
    """Spot prices and last-365-day history from CoinGecko."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.coingecko.com', '/api/v3/exchange_rates')
        return {code.upper(): to_decimal(entry['value'])
                for code, entry in data['rates'].items()}

    def history_ccys(self):
        # CoinGecko seems to have historical data for all ccys it supports
        return CURRENCIES[self.name()]

    async def request_history(self, ccy):
        # ref https://docs.coingecko.com/v3.0.1/reference/coins-id-market-chart
        num_days = 365
        # Setting `num_days = "max"` started erroring (around 2024-04) with:
        # > Your request exceeds the allowed time range. Public API users are limited to querying
        # > historical data within the past 365 days. Upgrade to a paid plan to enjoy full historical data access
        chart = await self.get_json(
            'api.coingecko.com',
            f"/api/v3/coins/bitcoin/market_chart?vs_currency={ccy}&days={num_days}")
        return {
            timestamp_to_datetime(point[0] / 1000, utc=True).strftime('%Y-%m-%d'): str(point[1])
            for point in chart['prices']
        }
|
|
|
|
|
|
|
|
|
|
|
2023-12-22 12:03:58 +02:00
|
|
|
class Bit2C(ExchangeBase):
    """ILS spot price and daily history from Bit2C."""

    async def get_rates(self, ccy):
        ticker = await self.get_json('bit2c.co.il', '/Exchanges/BtcNis/Ticker.json')
        return {'ILS': to_decimal(ticker['ll'])}

    def history_ccys(self):
        return CURRENCIES[self.name()]

    async def request_history(self, ccy):
        now = int(time.time())
        klines = await self.get_json(
            'bit2c.co.il',
            '/Exchanges/BtcNis/KLines?resolution=1D&from=1357034400&to=%s' % now)
        return {
            timestamp_to_datetime(row[0], utc=True).strftime('%Y-%m-%d'): str(row[6])
            for row in klines
        }
|
|
|
|
|
|
|
|
|
|
|
2020-07-19 01:52:31 +00:00
|
|
|
class CointraderMonitor(ExchangeBase):
    """BRL spot price from Cointrader Monitor."""

    async def get_rates(self, ccy):
        ticker = await self.get_json('cointradermonitor.com', '/api/pbb/v1/ticker')
        return {'BRL': to_decimal(ticker['last'])}
|
2020-07-19 01:52:31 +00:00
|
|
|
|
|
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
class itBit(ExchangeBase):
    """Spot price for USD/EUR/SGD from itBit."""

    async def get_rates(self, ccy):
        supported = ['USD', 'EUR', 'SGD']
        ticker = await self.get_json('api.itbit.com', '/v1/markets/XBT%s/ticker' % ccy)
        rates = dict.fromkeys(supported)
        if ccy in supported:
            rates[ccy] = to_decimal(ticker['lastPrice'])
        return rates
|
|
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
2016-01-13 01:03:30 +01:00
|
|
|
class Kraken(ExchangeBase):
    """Spot prices for several fiat pairs from Kraken."""

    async def get_rates(self, ccy):
        # ref https://docs.kraken.com/api/docs/rest-api/get-ticker-information
        supported = ['EUR', 'USD', 'CAD', 'GBP', 'JPY']
        pairs = ','.join('XBT%s' % code for code in supported)
        data = await self.get_json('api.kraken.com',
                                   '/0/public/Ticker?pair=%s' % pairs)
        # pair names end in the 3-letter fiat code; 'c' holds [last_price, lot_volume]
        return {pair[-3:]: to_decimal(ticker['c'][0])
                for pair, ticker in data['result'].items()}

    # async def request_history(self, ccy):
    #     # ref https://docs.kraken.com/api/docs/rest-api/get-ohlc-data
    #     pass  # limited to last 720 steps (step can by 1 day / 7 days / 15 days)
|
|
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
|
2016-01-15 14:57:48 -02:00
|
|
|
class MercadoBitcoin(ExchangeBase):
    """BRL spot price for the Mercado Bitcoin exchange, via the Bitvalor API."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.bitvalor.com', '/v1/ticker.json')
        last_price = data['ticker_1h']['exchanges']['MBT']['last']
        return {'BRL': to_decimal(last_price)}
|
2016-01-15 14:57:48 -02:00
|
|
|
|
|
|
|
|
|
2017-06-05 13:05:47 +04:00
|
|
|
class Winkdex(ExchangeBase):
    """USD spot price and history from Winkdex (prices are served in cents)."""

    async def get_rates(self, ccy):
        data = await self.get_json('winkdex.com', '/api/v0/price')
        return {'USD': to_decimal(data['price']) / 100}

    def history_ccys(self):
        return ['USD']

    async def request_history(self, ccy):
        data = await self.get_json('winkdex.com',
                                   "/api/v0/series?start_time=1342915200")
        series = data['series'][0]['results']
        # keep the date part of the timestamp; convert cents -> dollars
        return {entry['timestamp'][:10]: str(to_decimal(entry['price']) / 100)
                for entry in series}
|
2017-01-20 16:50:30 -02:00
|
|
|
|
2016-01-29 19:58:40 +01:00
|
|
|
|
2018-03-05 20:08:10 +09:00
|
|
|
class Zaif(ExchangeBase):
    """JPY spot price from Zaif."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.zaif.jp', '/api/1/last_price/btc_jpy')
        return {'JPY': to_decimal(data['last_price'])}
|
2018-03-05 20:08:10 +09:00
|
|
|
|
|
|
|
|
|
2020-07-17 15:27:36 -03:00
|
|
|
class Bitragem(ExchangeBase):
    """BRL index price from Bitragem."""

    async def get_rates(self, ccy):
        data = await self.get_json('api.bitragem.com', '/v1/index?asset=BTC&market=BRL')
        index_value = data['response']['index']
        return {'BRL': to_decimal(index_value)}
|
2020-07-17 15:27:36 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class Biscoint(ExchangeBase):
    """BRL spot price from Biscoint."""

    async def get_rates(self, ccy):
        # Fixed query string: it had been corrupted by HTML-entity mangling
        # ('&quote=' had been rendered as '"e='), yielding an invalid API query.
        json = await self.get_json('api.biscoint.io', '/v1/ticker?base=BTC&quote=BRL')
        return {'BRL': to_decimal(json['data']['last'])}
|
2020-07-17 15:27:36 -03:00
|
|
|
|
|
|
|
|
|
2020-12-06 23:35:39 -03:00
|
|
|
class Walltime(ExchangeBase):
    """BRL spot price from Walltime (published as a static JSON on S3)."""

    async def get_rates(self, ccy):
        data = await self.get_json(
            's3.amazonaws.com',
            '/data-production-walltime-info/production/dynamic/walltime-info.json')
        return {'BRL': to_decimal(data['BRL_XBT']['last_inexact'])}
|
2020-12-06 23:35:39 -03:00
|
|
|
|
|
|
|
|
|
2016-01-29 19:58:40 +01:00
|
|
|
def dictinvert(d):
    """Invert a mapping {key: iterable_of_values} into {value: [keys]}."""
    inverted = {}
    for key, values in d.items():
        for value in values:
            inverted.setdefault(value, []).append(key)
    return inverted
|
|
|
|
|
|
2017-01-23 14:56:49 +01:00
|
|
|
def get_exchanges_and_currencies():
    """Return {exchange_class_name: [fiat ccy codes]}, read from currencies.json.

    If the bundled file is missing or unreadable, regenerate it by querying every
    exchange over the network (best-effort, 10 s overall timeout) and write it
    back to disk.
    """
    # load currencies.json from disk
    path = resource_path('currencies.json')
    try:
        with open(path, 'r', encoding='utf-8') as f:
            return json.loads(f.read())
    except Exception:
        pass
    # or if not present, generate it now.
    print("cannot find currencies.json. will regenerate it now.")
    d = {}

    def is_exchange(obj):
        # an ExchangeBase subclass defined in this module (excluding the base itself)
        return (inspect.isclass(obj)
                and issubclass(obj, ExchangeBase)
                and obj is not ExchangeBase)

    exchanges = dict(inspect.getmembers(sys.modules[__name__], is_exchange))

    async def get_currencies_safe(name, exchange):
        # best-effort: an exchange that errors out is simply omitted from the result
        try:
            d[name] = await exchange.get_currencies()
            print(name, "ok")
        except Exception:
            print(name, "error")

    async def query_all_exchanges_for_their_ccys_over_network():
        async with timeout_after(10):
            async with OldTaskGroup() as group:
                for name, klass in exchanges.items():
                    exchange = klass(None, None)
                    await group.spawn(get_currencies_safe(name, exchange))

    # note: a dedicated event loop is created here on purpose — this runs at
    # module import time, when the app's global loop might not exist yet.
    loop = asyncio.new_event_loop()
    try:
        loop.run_until_complete(query_all_exchanges_for_their_ccys_over_network())
    except Exception:  # was `except Exception as e` with an unused binding
        pass
    finally:
        loop.close()
    with open(path, 'w', encoding='utf-8') as f:
        f.write(json.dumps(d, indent=4, sort_keys=True))
    return d
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# {exchange class name -> list of supported fiat codes}; loaded (or regenerated
# over the network) at module import time.
CURRENCIES = get_exchanges_and_currencies()
|
|
|
|
|
|
2016-01-29 19:58:40 +01:00
|
|
|
|
2017-01-23 14:56:49 +01:00
|
|
|
def get_exchanges_by_ccy(history=True):
    """Map each fiat code to the exchange names quoting it.

    With history=True, only history-capable currencies per exchange are considered.
    """
    if not history:
        return dictinvert(CURRENCIES)
    hist_ccys = {}
    for name in CURRENCIES:
        exchange = globals()[name](None, None)
        hist_ccys[name] = exchange.history_ccys()
    return dictinvert(hist_ccys)
|
|
|
|
|
|
|
|
|
|
|
exchange_rate: try harder to refresh quote when cache is expiring
Previously we polled every 2.5 minutes to get the fx spot price,
and had a 10 minute cache expiry during which the latest spot price
was valid.
On Android, this often resulted in having no price available (showing
"No data" in GUI) when putting the app in the foreground after e.g.
a half-hour sleep in the background: often there would be no fx price
until the next tick, which could take 2.5 minutes. (btw in some cases
I saw the application trying to get new quotes from the network as
soon as the app was put in the foreground but it seems those happened
so fast that the network was not ready yet and DNS lookups failed)
Now we make the behaviour a bit more complex: we still fetch the price
every 2.5 mins, and the cache is still valid for 10 mins, however if
the last price is >7.5 mins old, we become more aggressive and go into
an exponential backoff, initially trying a request every few seconds.
For the Android scenario, this means there might be "No data" for fx
for a few seconds after a long sleep, however if there is a working
network, it should soon get a fresh fx spot price quote.
2023-06-14 15:42:28 +00:00
|
|
|
class FxThread(ThreadJob, EventListener, NetworkRetryManager[str]):
|
2013-09-23 16:14:28 +02:00
|
|
|
|
2023-03-28 15:45:15 +00:00
|
|
|
def __init__(self, *, config: SimpleConfig):
    """Set up fx state; an exchange is chosen from config and a refresh is queued."""
    ThreadJob.__init__(self)
    # Retry schedule for spot-price requests: fixed interval normally,
    # exponential backoff starting at 1s when the cached quote is near-stale.
    NetworkRetryManager.__init__(
        self,
        max_retry_delay_normal=SPOT_RATE_REFRESH_TARGET,
        init_retry_delay_normal=SPOT_RATE_REFRESH_TARGET,
        max_retry_delay_urgent=SPOT_RATE_REFRESH_TARGET,
        init_retry_delay_urgent=1,
    )  # note: we poll every 5 seconds for action, so we won't attempt connections more frequently than that.
    self.config = config
    self.register_callbacks()
    self.ccy = self.get_currency()  # configured fiat code, e.g. "USD"
    self.history_used_spot = False
    # GUI widget handles (set by the GUI, if any)
    self.ccy_combo = None
    self.hist_checkbox = None
    self.cache_dir = os.path.join(config.path, 'cache')  # type: str
    # event used by trigger_update() to ask run() for an immediate refresh;
    # set here so the first loop iteration refreshes right away
    self._trigger = asyncio.Event()
    self._trigger.set()
    self.set_exchange(self.config_exchange())
    make_dir(self.cache_dir)
|
2018-09-06 16:18:45 +02:00
|
|
|
|
2022-06-22 01:24:19 +02:00
|
|
|
@event_listener
def on_event_proxy_set(self, *args):
    """On proxy change, reset retry counters and request an immediate refresh."""
    self._clear_addr_retry_times()
    self._trigger.set()
|
2015-09-05 16:33:06 +09:00
|
|
|
|
2018-09-28 17:58:46 +02:00
|
|
|
@staticmethod
def get_currencies(history: bool) -> Sequence[str]:
    """Sorted fiat codes available; restricted to history-capable ones if *history*."""
    by_ccy = get_exchanges_by_ccy(history)
    return sorted(by_ccy)
|
|
|
|
|
|
2018-09-28 17:58:46 +02:00
|
|
|
@staticmethod
def get_exchanges_by_ccy(ccy: str, history: bool) -> Sequence[str]:
    """Names of exchanges quoting *ccy*; history-capable ones only if *history*."""
    by_ccy = get_exchanges_by_ccy(history)
    return by_ccy.get(ccy, [])
|
2017-01-03 09:02:26 +01:00
|
|
|
|
2018-11-26 21:21:02 +01:00
|
|
|
@staticmethod
def remove_thousands_separator(text: str) -> str:
    """Strip the canonical thousands separator from an amount string."""
    return text.replace(util.THOUSANDS_SEP, "")
|
2018-11-26 21:21:02 +01:00
|
|
|
|
locale amounts: consistently use "." as dec point, and " " as thou sep
Always use "." as decimal point, and " " as thousands separator.
Previously,
- for decimal point, we were using
- "." in some places (e.g. AmountEdit, most fiat amounts), and
- `locale.localeconv()['decimal_point']` in others.
- for thousands separator, we were using
- "," in some places (most fiat amounts), and
- " " in others (format_satoshis)
I think it is better to be consistent even if whatever we pick differs from the locale.
Using whitespace for thousands separator (vs comma) is probably less confusing for people
whose locale would user "." for ts and "," for dp (as in e.g. German).
The alternative option would be to always use the locale. Even if we decide to do that later,
this refactoring should be useful.
closes https://github.com/spesmilo/electrum/issues/2629
2023-01-10 14:45:35 +00:00
|
|
|
def ccy_amount_str(self, amount, *, add_thousands_sep: bool = False, ccy=None) -> str:
    """Format a fiat *amount* for display, using the app's canonical
    decimal point and thousands separator (see util.DECIMAL_POINT / THOUSANDS_SEP).

    The amount is rounded to the ccy's display precision (default 2 decimals).
    """
    prec = CCY_PRECISIONS.get(self.ccy if ccy is None else ccy, 2)
    # "," in the format spec turns on digit grouping; it gets substituted below
    fmt_str = "{:%s.%df}" % ("," if add_thousands_sep else "", max(0, prec))
    try:
        rounded_amount = round(amount, prec)
    except decimal.InvalidOperation:
        # e.g. a non-finite Decimal (NaN) cannot be rounded; format it as-is
        rounded_amount = amount
    text = fmt_str.format(rounded_amount)
    # replace "," -> THOUSANDS_SEP
    # replace "." -> DECIMAL_POINT
    # locate the decimal point BEFORE substituting separators (assumes
    # THOUSANDS_SEP is a single char so positions are preserved — it is " ")
    dp_loc = text.find(".")
    text = text.replace(",", util.THOUSANDS_SEP)
    if dp_loc == -1:
        return text
    return text[:dp_loc] + util.DECIMAL_POINT + text[dp_loc+1:]
|
2015-09-06 18:06:56 +09:00
|
|
|
|
2023-05-09 15:43:11 +02:00
|
|
|
def ccy_precision(self, ccy=None) -> int:
    """Number of decimal places used when displaying amounts in *ccy* (default 2)."""
    target = ccy if ccy is not None else self.ccy
    return CCY_PRECISIONS.get(target, 2)
|
|
|
|
|
|
2018-09-06 16:18:45 +02:00
|
|
|
async def run(self):
    """Main fx loop: poll every 5s and decide whether to refresh the spot price
    (and, when manually triggered, the historical rates)."""
    while True:
        # keep polling and see if we should refresh spot price or historical prices
        manually_triggered = False
        async with ignore_after(5):
            await self._trigger.wait()
            self._trigger.clear()
            manually_triggered = True
        if not self.is_enabled():
            continue
        if manually_triggered and self.has_history():  # maybe refresh historical prices
            self.exchange.get_historical_rates(self.ccy, self.cache_dir)
        now = time.time()
        if not manually_triggered and self.exchange._quotes_timestamp + SPOT_RATE_REFRESH_TARGET > now:
            continue  # last quote still fresh
        # If the last quote is relatively recent, we poll at fixed time intervals.
        # Once it gets close to cache expiry, we change to an exponential backoff, to try to get
        # a quote before it expires. Also, on Android, we might come back from a sleep after a long time,
        # with the last quote close to expiry or already expired, in that case we go into exponential backoff.
        is_urgent = self.exchange._quotes_timestamp + SPOT_RATE_CLOSE_TO_STALE < now
        addr_name = "spot-urgent" if is_urgent else "spot"  # this separates retry-counters
        if self._can_retry_addr(addr_name, urgent=is_urgent):
            self._trying_addr_now(addr_name)
            # refresh spot price
            await self.exchange.update_safe(self.ccy)
|
2017-01-03 09:02:26 +01:00
|
|
|
|
2023-05-24 17:41:44 +00:00
|
|
|
def is_enabled(self) -> bool:
    """Whether exchange-rate fetching is enabled in the user config."""
    return self.config.FX_USE_EXCHANGE_RATE
|
2017-01-03 09:02:26 +01:00
|
|
|
|
2023-05-24 17:41:44 +00:00
|
|
|
def set_enabled(self, b: bool) -> None:
    """Enable/disable fx fetching in the config and request a refresh."""
    self.config.FX_USE_EXCHANGE_RATE = b
    self.trigger_update()
|
2017-01-03 09:02:26 +01:00
|
|
|
|
2023-03-16 16:48:12 +01:00
|
|
|
def can_have_history(self):
    """True if fx is enabled and the current exchange has history for our ccy."""
    return self.is_enabled() and self.ccy in self.exchange.history_ccys()
|
2017-07-20 19:30:44 +02:00
|
|
|
|
2023-05-03 12:18:34 +00:00
|
|
|
def has_history(self) -> bool:
    """True if historical rates are both available and enabled in the config."""
    return self.can_have_history() and self.config.FX_HISTORY_RATES
|
2023-03-16 16:48:12 +01:00
|
|
|
|
2022-08-24 13:00:41 +00:00
|
|
|
def get_currency(self) -> str:
    """Return the fiat code from config. Use when dynamic fetching is needed;
    otherwise self.ccy caches it."""
    return self.config.FX_CURRENCY
|
2015-09-05 22:16:24 +09:00
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
def config_exchange(self) -> str:
    """Name of the configured exchange (class name, e.g. "CoinGecko")."""
    return self.config.FX_EXCHANGE
|
2015-09-04 22:36:52 +09:00
|
|
|
|
2021-01-29 19:53:04 +01:00
|
|
|
def set_currency(self, ccy: str):
    """Switch the display fiat currency, persist it, and refresh quotes."""
    self.ccy = ccy
    self.config.FX_CURRENCY = ccy
    self.trigger_update()
    self.on_quotes()
|
2016-01-21 16:29:46 +01:00
|
|
|
|
2018-09-14 23:01:28 +02:00
|
|
|
def trigger_update(self):
    """Request an immediate spot-rate refresh from run(). Thread-safe:
    the event is set via call_soon_threadsafe on the asyncio loop."""
    self._clear_addr_retry_times()
    loop = util.get_asyncio_loop()
    loop.call_soon_threadsafe(self._trigger.set)
|
2018-09-14 23:01:28 +02:00
|
|
|
|
2015-09-04 22:36:52 +09:00
|
|
|
def set_exchange(self, name):
    """Instantiate and switch to the exchange class named *name*.

    Falls back to the config's default exchange when *name* does not
    correspond to a class in this module.
    """
    # NOTE: the default is only looked up lazily, when *name* is unknown.
    class_ = globals().get(name) or globals().get(self.config.cv.FX_EXCHANGE.get_default_value())
    self.logger.info(f"using exchange {name}")
    if name != self.config_exchange():
        self.config.FX_EXCHANGE = name
    assert issubclass(class_, ExchangeBase), f"unexpected type {class_} for {name}"
    self.exchange = class_(self.on_quotes, self.on_history)  # type: ExchangeBase
    # Switching exchanges discards the old quotes, so force a refresh and
    # reload any cached historical rates for the current currency.
    self.trigger_update()
    self.exchange.read_historical_rates(self.ccy, self.cache_dir)
|
2015-09-05 21:47:35 +09:00
|
|
|
|
exchange_rate: try harder to refresh quote when cache is expiring
Previously we polled every 2.5 minutes to get the fx spot price,
and had a 10 minute cache expiry during which the latest spot price
was valid.
On Android, this often resulted in having no price available (showing
"No data" in GUI) when putting the app in the foreground after e.g.
a half-hour sleep in the background: often there would be no fx price
until the next tick, which could take 2.5 minutes. (btw in some cases
I saw the application trying to get new quotes from the network as
soon as the app was put in the foreground but it seems those happened
so fast that the network was not ready yet and DNS lookups failed)
Now we make the behaviour a bit more complex: we still fetch the price
every 2.5 mins, and the cache is still valid for 10 mins, however if
the last price is >7.5 mins old, we become more aggressive and go into
an exponential backoff, initially trying a request every few seconds.
For the Android scenario, this means there might be "No data" for fx
for a few seconds after a long sleep, however if there is a working
network, it should soon get a fresh fx spot price quote.
2023-06-14 15:42:28 +00:00
|
|
|
def on_quotes(self, *, received_new_data: bool = False):
    """Notify listeners that fx quotes may have changed.

    received_new_data: True when fresh data actually arrived from the exchange.
    """
    if received_new_data:
        # presumably resets per-address retry backoff after a success —
        # see trigger_update, which also clears it.
        self._clear_addr_retry_times()
    util.trigger_callback('on_quotes')
|
2015-09-04 22:36:52 +09:00
|
|
|
|
2015-11-23 19:38:48 +01:00
|
|
|
def on_history(self):
    """Notify listeners that historical fx rates were updated."""
    util.trigger_callback('on_history')
|
2015-11-21 15:24:38 +01:00
|
|
|
|
2018-09-10 16:43:04 +02:00
|
|
|
def exchange_rate(self) -> Decimal:
    """Return the current fiat spot rate as a Decimal (NaN when fx is disabled)."""
    if self.is_enabled():
        return self.exchange.get_cached_spot_quote(self.ccy)
    return Decimal('NaN')
|
2015-11-23 14:15:25 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def format_amount(self, btc_balance, *, timestamp: int = None) -> str:
    """Format *btc_balance* (satoshis) as a fiat value string, no currency code.

    When *timestamp* is given, the historical rate at that time is used
    instead of the current spot rate.  Returns '' if no rate is available.
    """
    rate = self.exchange_rate() if timestamp is None else self.timestamp_rate(timestamp)
    if rate.is_nan():
        return ''
    return "%s" % self.value_str(btc_balance, rate)
|
|
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def format_amount_and_units(self, btc_balance, *, timestamp: int = None) -> str:
    """Like format_amount, but with the fiat currency code appended.

    Returns '' if no rate is available.
    """
    rate = self.exchange_rate() if timestamp is None else self.timestamp_rate(timestamp)
    if rate.is_nan():
        return ''
    return "%s %s" % (self.value_str(btc_balance, rate), self.ccy)
|
2015-11-23 14:15:25 +01:00
|
|
|
|
2017-05-27 15:48:47 +01:00
|
|
|
def get_fiat_status_text(self, btc_balance, base_unit, decimal_point):
    """Build status-bar text showing the fiat value of a sample amount of *base_unit*."""
    rate = self.exchange_rate()
    if rate.is_nan():
        return _(" (No FX rate available)")
    # Use a larger sample amount when the display unit is the smallest one,
    # so the fiat value stays legible.
    amount = 1000 if decimal_point == 0 else 1
    sats = amount * COIN / (10**(8 - decimal_point))
    value = self.value_str(sats, rate)
    return " %d %s~%s %s" % (amount, base_unit, value, self.ccy)
|
2015-11-23 14:15:25 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def fiat_value(self, satoshis, rate) -> Decimal:
    """Convert *satoshis* to fiat at *rate*; NaN when the amount is unknown."""
    if satoshis is None:
        return Decimal('NaN')
    return Decimal(satoshis) / COIN * Decimal(rate)
|
|
|
|
|
|
2023-06-13 15:59:18 +00:00
|
|
|
def value_str(self, satoshis, rate, *, add_thousands_sep: bool = None) -> str:
    """Convert *satoshis* at *rate* and format the result for display."""
    return self.format_fiat(self.fiat_value(satoshis, rate), add_thousands_sep=add_thousands_sep)
|
2018-02-14 10:42:09 +01:00
|
|
|
|
2023-06-13 15:59:18 +00:00
|
|
|
def format_fiat(self, value: Decimal, *, add_thousands_sep: bool = None) -> str:
    """Format a fiat Decimal for display; placeholder text for NaN."""
    if value.is_nan():
        return _("No data")
    # Group digits by default unless the caller explicitly opted out.
    use_sep = True if add_thousands_sep is None else add_thousands_sep
    return self.ccy_amount_str(value, add_thousands_sep=use_sep)
|
2015-11-23 14:15:25 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def history_rate(self, d_t: Optional[datetime]) -> Decimal:
    """Return the historical fiat rate for *d_t*, NaN when unknown.

    For very recent dates the exchange often has no historical entry yet
    ("no rate for today, until tomorrow"), so the cached spot quote is
    substituted and self.history_used_spot records that this happened.
    """
    if d_t is None:
        return Decimal('NaN')
    rate = self.exchange.historical_rate(self.ccy, d_t)
    is_recent = (datetime.today().date() - d_t.date()).days <= 2
    if rate.is_nan() and is_recent:
        rate = self.exchange.get_cached_spot_quote(self.ccy)
        self.history_used_spot = True
    if rate is None:  # defensive: normalize a missing rate to NaN
        rate = 'NaN'
    return Decimal(rate)
|
2015-12-15 12:29:48 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def historical_value_str(self, satoshis, d_t: Optional[datetime]) -> str:
    """Fiat value of *satoshis* at time *d_t*, formatted for display."""
    fiat = self.historical_value(satoshis, d_t)
    return self.format_fiat(fiat)
|
2018-01-06 12:57:04 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def historical_value(self, satoshis, d_t: Optional[datetime]) -> Decimal:
    """Fiat value of *satoshis* at time *d_t*, as a Decimal (NaN if unknown)."""
    rate = self.history_rate(d_t)
    return self.fiat_value(satoshis, rate)
|
2018-02-14 10:40:11 +01:00
|
|
|
|
2021-03-12 18:29:00 +01:00
|
|
|
def timestamp_rate(self, timestamp: Optional[int]) -> Decimal:
    """Return the historical fiat rate at unix time *timestamp* (NaN if unknown).

    A None timestamp maps to a None datetime, which history_rate turns
    into NaN.
    """
    # timestamp_to_datetime is already imported at module level; the
    # previous function-local re-import of it was redundant.
    date = timestamp_to_datetime(timestamp)
    return self.history_rate(date)
|
2019-03-22 16:48:47 +01:00
|
|
|
|
|
|
|
|
|
2023-05-24 17:41:44 +00:00
|
|
|
# Import-time sanity check: the configured default exchange name must match
# one of the exchange classes defined in this module.
assert globals().get(SimpleConfig.FX_EXCHANGE.get_default_value()), f"default exchange {SimpleConfig.FX_EXCHANGE.get_default_value()} does not exist"
|