+import time
+import requests
import portfolio
import simplejson as json
from decimal import Decimal as D, ROUND_DOWN
-from datetime import date, datetime
+import datetime
import inspect
+from json import JSONDecodeError
+from simplejson.errors import JSONDecodeError as SimpleJSONDecodeError
+import dbs
# Public API of this module.
__all__ = ["Portfolio", "BalanceStore", "ReportStore", "TradeStore"]
class ReportStore:
- def __init__(self, market, verbose_print=True):
+ def __init__(self, market, verbose_print=True, no_http_dup=False):
self.market = market
self.verbose_print = verbose_print
+ self.print_logs = []
self.logs = []
+ self.redis_status = []
+
+ self.no_http_dup = no_http_dup
+ self.last_http = None
+
+ def merge(self, other_report):
+ self.logs += other_report.logs
+ self.logs.sort(key=lambda x: x["date"])
+
+ self.print_logs += other_report.print_logs
+ self.print_logs.sort(key=lambda x: x[0])
def print_log(self, message):
- message = str(message)
+ now = datetime.datetime.now()
+ message = "{:%Y-%m-%d %H:%M:%S}: {}".format(now, str(message))
+ self.print_logs.append([now, message])
if self.verbose_print:
print(message)
def add_log(self, hash_):
- hash_["date"] = datetime.now()
+ hash_["date"] = datetime.datetime.now()
+ if self.market is not None:
+ hash_["user_id"] = self.market.user_id
+ hash_["market_id"] = self.market.market_id
+ else:
+ hash_["user_id"] = None
+ hash_["market_id"] = None
self.logs.append(hash_)
+ return hash_
+
+ def add_redis_status(self, hash_):
+ self.redis_status.append(hash_)
+ return hash_
+
+ @staticmethod
+ def default_json_serial(obj):
+ if isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
+ return str(obj)
def to_json(self):
- def default_json_serial(obj):
- if isinstance(obj, (datetime, date)):
- return obj.isoformat()
- return str(obj)
- return json.dumps(self.logs, default=default_json_serial)
+ return json.dumps(self.logs, default=self.default_json_serial, indent=" ")
+
+ def to_json_array(self):
+ for log in (x.copy() for x in self.logs):
+ yield (
+ log.pop("date"),
+ log.pop("type"),
+ json.dumps(log, default=self.default_json_serial, indent=" ")
+ )
+
+ def to_json_redis(self):
+ for log in (x.copy() for x in self.redis_status):
+ yield (
+ log.pop("type"),
+ json.dumps(log, default=self.default_json_serial)
+ )
def set_verbose(self, verbose_print):
self.verbose_print = verbose_print
"args": args,
})
- def log_balances(self, tag=None):
+ def log_balances(self, tag=None, checkpoint=None, tickers=None,
+ ticker_currency=None, compute_value=None, type=None):
self.print_log("[Balance]")
for currency, balance in self.market.balances.all.items():
self.print_log("\t{}".format(balance))
- self.add_log({
- "type": "balance",
- "tag": tag,
- "balances": self.market.balances.as_json()
- })
+ log = {
+ "type": "balance",
+ "tag": tag,
+ "checkpoint": checkpoint,
+ "balances": self.market.balances.as_json()
+ }
+
+ if tickers is not None:
+ log["tickers"] = self._ticker_hash(tickers, ticker_currency,
+ compute_value, type)
+
+ self.add_log(log.copy())
+ self.add_redis_status(log)
def log_tickers(self, amounts, other_currency,
compute_value, type):
+ log = self._ticker_hash(amounts, other_currency, compute_value,
+ type)
+ log["type"] = "tickers"
+
+ self.add_log(log)
+
+ def _ticker_hash(self, amounts, other_currency, compute_value, type):
values = {}
rates = {}
if callable(compute_value):
for currency, amount in amounts.items():
values[currency] = amount.as_json()["value"]
rates[currency] = amount.rate
- self.add_log({
- "type": "tickers",
- "compute_value": compute_value,
- "balance_type": type,
- "currency": other_currency,
- "balances": values,
- "rates": rates,
- "total": sum(amounts.values()).as_json()["value"]
- })
+ return {
+ "compute_value": compute_value,
+ "balance_type": type,
+ "currency": other_currency,
+ "balances": values,
+ "rates": rates,
+ "total": sum(amounts.values()).as_json()["value"]
+ }
def log_dispatch(self, amount, amounts, liquidity, repartition):
self.add_log({
})
def log_http_request(self, method, url, body, headers, response):
- self.add_log({
- "type": "http_request",
- "method": method,
- "url": url,
- "body": body,
- "headers": headers,
- "status": response.status_code,
- "response": response.text
- })
+ if isinstance(response, Exception):
+ self.add_log({
+ "type": "http_request",
+ "method": method,
+ "url": url,
+ "body": body,
+ "headers": headers,
+ "status": -1,
+ "response": None,
+ "error": response.__class__.__name__,
+ "error_message": str(response),
+ })
+ self.last_http = None
+ elif self.no_http_dup and \
+ self.last_http is not None and \
+ self.last_http["url"] == url and \
+ self.last_http["method"] == method and \
+ self.last_http["response"] == response.text:
+ self.add_log({
+ "type": "http_request",
+ "method": method,
+ "url": url,
+ "body": body,
+ "headers": headers,
+ "status": response.status_code,
+ "duration": response.elapsed.total_seconds(),
+ "response": None,
+ "response_same_as": self.last_http["date"]
+ })
+ else:
+ self.last_http = self.add_log({
+ "type": "http_request",
+ "method": method,
+ "url": url,
+ "body": body,
+ "headers": headers,
+ "status": response.status_code,
+ "duration": response.elapsed.total_seconds(),
+ "response": response.text,
+ "response_same_as": None,
+ })
def log_error(self, action, message=None, exception=None):
self.print_log("[Error] {}".format(action))
"action": action,
})
+ def log_market(self, args):
+ self.add_log({
+ "type": "market",
+ "commit": "$Format:%H$",
+ "args": vars(args),
+ })
+
class BalanceStore:
def __init__(self, market):
self.market = market
compute_value, type)
return amounts
- def fetch_balances(self, tag=None):
+ def fetch_balances(self, tag=None, add_portfolio=False,
+ checkpoint=None, log_tickers=False, add_usdt=False,
+ ticker_currency="BTC", ticker_compute_value="average", ticker_type="total"):
all_balances = self.market.ccxt.fetch_all_balances()
for currency, balance in all_balances.items():
if balance["exchange_total"] != 0 or balance["margin_total"] != 0 or \
currency in self.all:
self.all[currency] = portfolio.Balance(currency, balance)
- self.market.report.log_balances(tag=tag)
+ if add_portfolio:
+ for currency in Portfolio.repartition(from_cache=True):
+ self.all.setdefault(currency, portfolio.Balance(currency, {}))
+ if add_usdt:
+ self.all.setdefault("USDT", portfolio.Balance("USDT", {}))
+ if log_tickers:
+ tickers = self.in_currency(ticker_currency, compute_value=ticker_compute_value, type=ticker_type)
+ self.market.report.log_balances(tag=tag, checkpoint=checkpoint,
+ tickers=tickers, ticker_currency=ticker_currency,
+ compute_value=ticker_compute_value, type=ticker_type)
+ else:
+ self.market.report.log_balances(tag=tag, checkpoint=checkpoint)
+
+ def available_balances_for_repartition(self,
+ compute_value="average", base_currency="BTC",
+ liquidity="medium", repartition=None):
+ if repartition is None:
+ repartition = Portfolio.repartition(liquidity=liquidity)
+ base_currency_balance = self.all.get(base_currency)
+
+ if base_currency_balance is None:
+ total_base_value = portfolio.Amount(base_currency, 0)
+ else:
+ total_base_value = base_currency_balance.exchange_free + \
+ base_currency_balance.margin_available - \
+ base_currency_balance.margin_in_position
+
+ amount_in_position = {}
+
+ # Compute balances already in the target position
+ for currency, (ptt, trade_type) in repartition.items():
+ amount_in_position[currency] = portfolio.Amount(base_currency, 0)
+ balance = self.all.get(currency)
+ if currency != base_currency and balance is not None:
+ if trade_type == "short":
+ amount = balance.margin_borrowed
+ else:
+ amount = balance.exchange_free + balance.exchange_used
+ amount_in_position[currency] = amount.in_currency(base_currency,
+ self.market, compute_value=compute_value)
+ total_base_value += amount_in_position[currency]
+
+ # recursively delete more-than-filled positions from the wanted
+ # repartition
+ did_delete = True
+ while did_delete:
+ did_delete = False
+ sum_ratio = sum([v[0] for k, v in repartition.items()])
+ current_base_value = total_base_value
+ for currency, (ptt, trade_type) in repartition.copy().items():
+ if amount_in_position[currency] > current_base_value * ptt / sum_ratio:
+ did_delete = True
+ del(repartition[currency])
+ total_base_value -= amount_in_position[currency]
+ return repartition, total_base_value, amount_in_position
def dispatch_assets(self, amount, liquidity="medium", repartition=None):
if repartition is None:
- repartition = portfolio.Portfolio.repartition(self.market, liquidity=liquidity)
+ repartition = Portfolio.repartition(liquidity=liquidity)
sum_ratio = sum([v[0] for k, v in repartition.items()])
amounts = {}
for currency, (ptt, trade_type) in repartition.items():
@property
def pending(self):
- return list(filter(lambda t: not t.is_fullfiled, self.all))
+ return list(filter(lambda t: t.pending, self.all))
def compute_trades(self, values_in_base, new_repartition, only=None):
computed_trades = []
orders.append(trade.prepare_order(compute_value=compute_value))
self.market.report.log_orders(orders, only, compute_value)
+ def close_trades(self):
+ for trade in self.all:
+ trade.close()
+
def print_all_with_order(self, ind=""):
for trade in self.all:
trade.print_with_order(ind=ind)
for order in self.all_orders(state="open"):
order.get_status()
class NoopLock:
    """Context manager with lock semantics that does nothing.

    Stand-in for threading.Lock while no worker thread is running.
    """
    def __enter__(self, *args):
        pass

    def __exit__(self, *args):
        pass

class LockedVar:
    """A value guarded by a lock.

    Starts with a no-op lock; start_lock() upgrades it to a real
    threading.Lock once concurrent access becomes possible. Unknown
    attribute access is proxied to the wrapped value, under the lock.
    """
    def __init__(self, value):
        self.lock = NoopLock()
        self.val = value

    def start_lock(self):
        """Switch to a real lock (call before spawning threads)."""
        import threading
        self.lock = threading.Lock()

    def set(self, value):
        with self.lock:
            self.val = value

    def get(self, key=None):
        """Return the value; if it is a dict and key is given, val.get(key)."""
        with self.lock:
            if key is None or not isinstance(self.val, dict):
                return self.val
            return self.val.get(key)

    def __getattr__(self, key):
        # Only called for attributes not found on LockedVar itself;
        # delegate to the wrapped value while holding the lock.
        with self.lock:
            return getattr(self.val, key)
+
class Portfolio:
    """Shared, optionally worker-backed cache of the cryptoportfolio feed.

    All mutable state lives in class attributes wrapped in LockedVar so a
    background worker thread and the main thread can share it safely.
    """
    URL = "https://cryptoportfolio.io/wp-content/uploads/portfolio/json/cryptoportfolio.json"
    data = LockedVar(None)       # raw parsed JSON feed ("" when loaded from redis)
    liquidities = LockedVar({})  # {liquidity: {date: {currency: (ratio, direction)}}}
    last_date = LockedVar(None)  # most recent repartition date seen
    report = LockedVar(ReportStore(None, no_http_dup=True))
    worker = None                # background fetcher thread
    worker_tag = ""              # log prefix while the worker is active
    worker_started = False
    worker_notify = None         # Event: ask the worker to refetch
    callback = None              # Event: worker signals fetch completion

    @classmethod
    def start_worker(cls, poll=30):
        """Spawn the daemon thread that refreshes the portfolio on demand."""
        import threading

        cls.worker = threading.Thread(name="portfolio", daemon=True,
                target=cls.wait_for_notification, kwargs={"poll": poll})
        cls.worker_notify = threading.Event()
        cls.callback = threading.Event()

        # Shared state now needs real locks instead of the no-op default.
        cls.last_date.start_lock()
        cls.liquidities.start_lock()
        cls.report.start_lock()

        cls.worker_tag = "[Worker] "
        cls.worker_started = True
        cls.worker.start()

    @classmethod
    def is_worker_thread(cls):
        """True when called from the background worker thread itself."""
        if cls.worker is None:
            return False
        else:
            import threading
            return cls.worker == threading.current_thread()

    @classmethod
    def wait_for_notification(cls, poll=30):
        """Worker loop: refetch when notified, sleeping `poll`s between runs."""
        if not cls.is_worker_thread():
            raise RuntimeError("This method needs to be ran with the worker")
        while cls.worker_started:
            cls.worker_notify.wait()
            if cls.worker_started:
                cls.worker_notify.clear()
                cls.report.print_log("[Worker] Fetching cryptoportfolio")
                cls.get_cryptoportfolio(refetch=True)
                cls.callback.set()
            # Rate-limit successive fetches.
            time.sleep(poll)

    @classmethod
    def stop_worker(cls):
        """Ask the worker loop to exit; safe to call even if never started."""
        cls.worker_started = False
        # BUG FIX: worker_notify is None when start_worker() was never
        # called; guard so stop_worker() cannot raise AttributeError.
        if cls.worker_notify is not None:
            cls.worker_notify.set()

    @classmethod
    def notify_and_wait(cls):
        """Wake the worker and block until it signals completion."""
        cls.callback.clear()
        cls.worker_notify.set()
        cls.callback.wait()

    @classmethod
    def wait_for_recent(cls, delta=4, poll=30):
        """Poll until the cached portfolio is younger than `delta` days."""
        cls.get_cryptoportfolio()
        while cls.last_date.get() is None or \
                datetime.datetime.now() - cls.last_date.get() > datetime.timedelta(delta):
            if cls.worker is None:
                time.sleep(poll)
            cls.report.print_log("Attempt to fetch up-to-date cryptoportfolio")
            cls.get_cryptoportfolio(refetch=True)

    @classmethod
    def repartition(cls, liquidity="medium", from_cache=False):
        """Return a copy of the latest repartition for `liquidity`, or None."""
        if from_cache:
            cls.retrieve_cryptoportfolio()
        cls.get_cryptoportfolio()
        liquidities = cls.liquidities.get(liquidity)
        if liquidities is not None and cls.last_date.get() in liquidities:
            return liquidities[cls.last_date.get()].copy()

    @classmethod
    def get_cryptoportfolio(cls, refetch=False):
        """Fetch and parse the remote feed (delegating to the worker if any)."""
        if cls.data.get() is not None and not refetch:
            return
        if cls.worker is not None and not cls.is_worker_thread():
            # Delegate the fetch to the worker thread and wait for it.
            cls.notify_and_wait()
            return
        try:
            # BUG FIX: without a timeout a stalled server would hang the
            # caller (or the worker loop) forever.
            r = requests.get(cls.URL, timeout=30)
            cls.report.log_http_request(r.request.method,
                    r.request.url, r.request.body, r.request.headers, r)
        except Exception as e:
            cls.report.log_error("{}get_cryptoportfolio".format(cls.worker_tag),
                    exception=e)
            return
        try:
            cls.data.set(r.json(parse_int=D, parse_float=D))
            cls.parse_cryptoportfolio()
            cls.store_cryptoportfolio()
        except (JSONDecodeError, SimpleJSONDecodeError):
            # Malformed feed: drop all cached state.
            cls.data.set(None)
            cls.last_date.set(None)
            cls.liquidities.set({})

    @classmethod
    def retrieve_cryptoportfolio(cls):
        """Load the last stored repartition from redis, if available."""
        if dbs.redis_connected():
            repartition = dbs.redis.get("/cryptoportfolio/repartition/latest")
            date = dbs.redis.get("/cryptoportfolio/repartition/date")
            if date is not None and repartition is not None:
                date = datetime.datetime.strptime(date.decode(), "%Y-%m-%d")
                repartition = json.loads(repartition, parse_int=D, parse_float=D)
                # Rebuild the {liquidity: {date: weights}} shape.
                repartition = { k: { date: v } for k, v in repartition.items() }

                # Non-None placeholder so get_cryptoportfolio() won't refetch.
                cls.data.set("")
                cls.last_date.set(date)
                cls.liquidities.set(repartition)

    @classmethod
    def store_cryptoportfolio(cls):
        """Persist the latest repartition per liquidity to redis."""
        if dbs.redis_connected():
            hash_ = {}
            for liquidity, repartitions in cls.liquidities.items():
                hash_[liquidity] = repartitions[cls.last_date.get()]
            dump = json.dumps(hash_)
            key = "/cryptoportfolio/repartition/latest"
            dbs.redis.set(key, dump)
            key = "/cryptoportfolio/repartition/date"
            dbs.redis.set(key, cls.last_date.date().isoformat())

    @classmethod
    def parse_cryptoportfolio(cls):
        """Turn the raw feed into {liquidity: {date: {currency: (w, dir)}}}."""
        def filter_weights(weight_hash):
            # Drop zero weights and the "_row" date column itself.
            if weight_hash[1][0] == 0:
                return False
            if weight_hash[0] == "_row":
                return False
            return True

        def clean_weights(i):
            # Currency names ending in "s" denote short positions in the feed.
            def clean_weights_(h):
                if h[0].endswith("s"):
                    return [h[0][0:-1], (h[1][i], "short")]
                else:
                    return [h[0], (h[1][i], "long")]
            return clean_weights_

        def parse_weights(portfolio_hash):
            if "weights" not in portfolio_hash:
                return {}
            weights_hash = portfolio_hash["weights"]
            weights = {}
            for i in range(len(weights_hash["_row"])):
                date = datetime.datetime.strptime(weights_hash["_row"][i],
                        "%Y-%m-%d")
                weights[date] = dict(filter(
                    filter_weights,
                    map(clean_weights(i), weights_hash.items())))
            return weights

        high_liquidity = parse_weights(cls.data.get("portfolio_1"))
        medium_liquidity = parse_weights(cls.data.get("portfolio_2"))

        cls.liquidities.set({
            "medium": medium_liquidity,
            "high": high_liquidity,
        })
        # Epoch-like default keeps max() well-defined when a section is empty.
        cls.last_date.set(max(
            max(medium_liquidity.keys(), default=datetime.datetime(1, 1, 1)),
            max(high_liquidity.keys(), default=datetime.datetime(1, 1, 1))
        ))