X-Git-Url: https://git.immae.eu/?a=blobdiff_plain;f=store.py;h=467dd4b40cfd9ef05a9a0974c88e40f1560d8c1b;hb=1d72880c097ea8259ce9cc63cfe55e6cc7516bd2;hp=d25dd35d1b06efe8f73b812c90cb1d51a66448cc;hpb=f861492d29f8f646410f7b24f68e0bf6d877ecee;p=perso%2FImmae%2FProjets%2FCryptomonnaies%2FCryptoportfolio%2FTrader.git

diff --git a/store.py b/store.py
index d25dd35..467dd4b 100644
--- a/store.py
+++ b/store.py
@@ -1,33 +1,67 @@
+import time
+import requests
 import portfolio
 import simplejson as json
 from decimal import Decimal as D, ROUND_DOWN
-from datetime import date, datetime
+import datetime
 import inspect
+from json import JSONDecodeError
+from simplejson.errors import JSONDecodeError as SimpleJSONDecodeError
 
-__all__ = ["BalanceStore", "ReportStore", "TradeStore"]
+__all__ = ["Portfolio", "BalanceStore", "ReportStore", "TradeStore"]
 
 class ReportStore:
-    def __init__(self, market, verbose_print=True):
+    def __init__(self, market, verbose_print=True, no_http_dup=False):
         self.market = market
         self.verbose_print = verbose_print
 
+        self.print_logs = []
         self.logs = []
 
+        self.no_http_dup = no_http_dup
+        self.last_http = None
+
+    def merge(self, other_report):
+        self.logs += other_report.logs
+        self.logs.sort(key=lambda x: x["date"])
+
+        self.print_logs += other_report.print_logs
+        self.print_logs.sort(key=lambda x: x[0])
+
     def print_log(self, message):
-        message = str(message)
+        now = datetime.datetime.now()
+        message = "{:%Y-%m-%d %H:%M:%S}: {}".format(now, str(message))
+        self.print_logs.append([now, message])
         if self.verbose_print:
             print(message)
 
     def add_log(self, hash_):
-        hash_["date"] = datetime.now()
+        hash_["date"] = datetime.datetime.now()
+        if self.market is not None:
+            hash_["user_id"] = self.market.user_id
+            hash_["market_id"] = self.market.market_id
+        else:
+            hash_["user_id"] = None
+            hash_["market_id"] = None
         self.logs.append(hash_)
+        return hash_
+
+    @staticmethod
+    def default_json_serial(obj):
+        if isinstance(obj, (datetime.datetime, datetime.date)):
+            return obj.isoformat()
+        return str(obj)
 
     def to_json(self):
-        def default_json_serial(obj):
-            if isinstance(obj, (datetime, date)):
-                return obj.isoformat()
-            return str(obj)
-        return json.dumps(self.logs, default=default_json_serial, indent="  ")
+        return json.dumps(self.logs, default=self.default_json_serial, indent="  ")
+
+    def to_json_array(self):
+        for log in (x.copy() for x in self.logs):
+            yield (
+                log.pop("date"),
+                log.pop("type"),
+                json.dumps(log, default=self.default_json_serial, indent="  ")
+            )
 
     def set_verbose(self, verbose_print):
         self.verbose_print = verbose_print
@@ -152,15 +186,45 @@ class ReportStore:
         })
 
     def log_http_request(self, method, url, body, headers, response):
-        self.add_log({
-            "type": "http_request",
-            "method": method,
-            "url": url,
-            "body": body,
-            "headers": headers,
-            "status": response.status_code,
-            "response": response.text
-        })
+        if isinstance(response, Exception):
+            self.add_log({
+                "type": "http_request",
+                "method": method,
+                "url": url,
+                "body": body,
+                "headers": headers,
+                "status": -1,
+                "response": None,
+                "error": response.__class__.__name__,
+                "error_message": str(response),
+            })
+            self.last_http = None
+        elif self.no_http_dup and \
+                self.last_http is not None and \
+                self.last_http["url"] == url and \
+                self.last_http["method"] == method and \
+                self.last_http["response"] == response.text:
+            self.add_log({
+                "type": "http_request",
+                "method": method,
+                "url": url,
+                "body": body,
+                "headers": headers,
+                "status": response.status_code,
+                "response": None,
+                "response_same_as": self.last_http["date"]
+            })
+        else:
+            self.last_http = self.add_log({
+                "type": "http_request",
+                "method": method,
+                "url": url,
+                "body": body,
+                "headers": headers,
+                "status": response.status_code,
+                "response": response.text,
+                "response_same_as": None,
+            })
 
     def log_error(self, action, message=None, exception=None):
         self.print_log("[Error] {}".format(action))
@@ -185,6 +249,13 @@ class ReportStore:
             "action": action,
         })
 
+    def log_market(self, args):
+        self.add_log({
+            "type": "market",
+            "commit": "$Format:%H$",
+            "args": vars(args),
+        })
+
 class BalanceStore:
     def __init__(self, market):
         self.market = market
@@ -213,7 +284,7 @@ class BalanceStore:
 
     def dispatch_assets(self, amount, liquidity="medium", repartition=None):
         if repartition is None:
-            repartition = portfolio.Portfolio.repartition(self.market, liquidity=liquidity)
+            repartition = Portfolio.repartition(liquidity=liquidity)
         sum_ratio = sum([v[0] for k, v in repartition.items()])
         amounts = {}
         for currency, (ptt, trade_type) in repartition.items():
@@ -301,4 +372,170 @@ class TradeStore:
     def update_all_orders_status(self):
         for order in self.all_orders(state="open"):
             order.get_status()
 
+class NoopLock:
+    def __enter__(self, *args):
+        pass
+    def __exit__(self, *args):
+        pass
+
+class LockedVar:
+    def __init__(self, value):
+        self.lock = NoopLock()
+        self.val = value
+
+    def start_lock(self):
+        import threading
+        self.lock = threading.Lock()
+
+    def set(self, value):
+        with self.lock:
+            self.val = value
+
+    def get(self, key=None):
+        with self.lock:
+            if key is not None and isinstance(self.val, dict):
+                return self.val.get(key)
+            else:
+                return self.val
+
+    def __getattr__(self, key):
+        with self.lock:
+            return getattr(self.val, key)
+
+class Portfolio:
+    URL = "https://cryptoportfolio.io/wp-content/uploads/portfolio/json/cryptoportfolio.json"
+    data = LockedVar(None)
+    liquidities = LockedVar({})
+    last_date = LockedVar(None)
+    report = LockedVar(ReportStore(None, no_http_dup=True))
+    worker = None
+    worker_started = False
+    worker_notify = None
+    callback = None
+
+    @classmethod
+    def start_worker(cls, poll=30):
+        import threading
+
+        cls.worker = threading.Thread(name="portfolio", daemon=True,
+                target=cls.wait_for_notification, kwargs={"poll": poll})
+        cls.worker_notify = threading.Event()
+        cls.callback = threading.Event()
+
+        cls.last_date.start_lock()
+        cls.liquidities.start_lock()
+        cls.report.start_lock()
+
+        cls.worker_started = True
+        cls.worker.start()
+
+    @classmethod
+    def is_worker_thread(cls):
+        if cls.worker is None:
+            return False
+        else:
+            import threading
+            return cls.worker == threading.current_thread()
+
+    @classmethod
+    def wait_for_notification(cls, poll=30):
+        if not cls.is_worker_thread():
+            raise RuntimeError("This method needs to be ran with the worker")
+        while cls.worker_started:
+            cls.worker_notify.wait()
+            if cls.worker_started:
+                cls.worker_notify.clear()
+                cls.report.print_log("Fetching cryptoportfolio")
+                cls.get_cryptoportfolio(refetch=True)
+                cls.callback.set()
+                time.sleep(poll)
+
+    @classmethod
+    def stop_worker(cls):
+        cls.worker_started = False
+        cls.worker_notify.set()
+
+    @classmethod
+    def notify_and_wait(cls):
+        cls.callback.clear()
+        cls.worker_notify.set()
+        cls.callback.wait()
+
+    @classmethod
+    def wait_for_recent(cls, delta=4, poll=30):
+        cls.get_cryptoportfolio()
+        while cls.last_date.get() is None or datetime.datetime.now() - cls.last_date.get() > datetime.timedelta(delta):
+            if cls.worker is None:
+                time.sleep(poll)
+            cls.report.print_log("Attempt to fetch up-to-date cryptoportfolio")
+            cls.get_cryptoportfolio(refetch=True)
+
+    @classmethod
+    def repartition(cls, liquidity="medium"):
+        cls.get_cryptoportfolio()
+        liquidities = cls.liquidities.get(liquidity)
+        return liquidities[cls.last_date.get()]
+
+    @classmethod
+    def get_cryptoportfolio(cls, refetch=False):
+        if cls.data.get() is not None and not refetch:
+            return
+        if cls.worker is not None and not cls.is_worker_thread():
+            cls.notify_and_wait()
+            return
+        try:
+            r = requests.get(cls.URL)
+            cls.report.log_http_request(r.request.method,
+                    r.request.url, r.request.body, r.request.headers, r)
+        except Exception as e:
+            cls.report.log_error("get_cryptoportfolio", exception=e)
+            return
+        try:
+            cls.data.set(r.json(parse_int=D, parse_float=D))
+            cls.parse_cryptoportfolio()
+        except (JSONDecodeError, SimpleJSONDecodeError):
+            cls.data.set(None)
+            cls.last_date.set(None)
+            cls.liquidities.set({})
+
+    @classmethod
+    def parse_cryptoportfolio(cls):
+        def filter_weights(weight_hash):
+            if weight_hash[1][0] == 0:
+                return False
+            if weight_hash[0] == "_row":
+                return False
+            return True
+
+        def clean_weights(i):
+            def clean_weights_(h):
+                if h[0].endswith("s"):
+                    return [h[0][0:-1], (h[1][i], "short")]
+                else:
+                    return [h[0], (h[1][i], "long")]
+            return clean_weights_
+
+        def parse_weights(portfolio_hash):
+            if "weights" not in portfolio_hash:
+                return {}
+            weights_hash = portfolio_hash["weights"]
+            weights = {}
+            for i in range(len(weights_hash["_row"])):
+                date = datetime.datetime.strptime(weights_hash["_row"][i], "%Y-%m-%d")
+                weights[date] = dict(filter(
+                    filter_weights,
+                    map(clean_weights(i), weights_hash.items())))
+            return weights
+
+        high_liquidity = parse_weights(cls.data.get("portfolio_1"))
+        medium_liquidity = parse_weights(cls.data.get("portfolio_2"))
+
+        cls.liquidities.set({
+            "medium": medium_liquidity,
+            "high": high_liquidity,
+            })
+        cls.last_date.set(max(
+            max(medium_liquidity.keys(), default=datetime.datetime(1, 1, 1)),
+            max(high_liquidity.keys(), default=datetime.datetime(1, 1, 1))
+            ))
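
For reference, a minimal usage sketch of the Portfolio worker API added by this commit. It is not part of the diff: it assumes store.py is importable as `store`, that cryptoportfolio.io is reachable, and that the published weights are recent enough for wait_for_recent to return.

# Usage sketch (illustrative only, not committed code).
import store

# Spawn the background thread that refetches cryptoportfolio.json on demand.
store.Portfolio.start_worker(poll=30)

# Block until the published weights are at most 4 days old; this notifies the
# worker and polls last_date until a recent enough snapshot is available.
store.Portfolio.wait_for_recent(delta=4)

# Repartition for the "medium" liquidity portfolio at the latest date:
# a dict mapping currency to (weight, "long" or "short").
repartition = store.Portfolio.repartition(liquidity="medium")
print(repartition)

# Shut the worker down cleanly.
store.Portfolio.stop_worker()

With the worker running, BalanceStore.dispatch_assets can rely on Portfolio.repartition(liquidity=...) directly, which is why this commit drops the market argument from the old portfolio.Portfolio.repartition call.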