]>
Commit | Line | Data |
---|---|---|
1 | import time | |
2 | import requests | |
3 | import portfolio | |
4 | import simplejson as json | |
5 | from decimal import Decimal as D, ROUND_DOWN | |
6 | import datetime | |
7 | import inspect | |
8 | from json import JSONDecodeError | |
9 | from simplejson.errors import JSONDecodeError as SimpleJSONDecodeError | |
10 | ||
11 | __all__ = ["Portfolio", "BalanceStore", "ReportStore", "TradeStore"] | |
12 | ||
class ReportStore:
    """Accumulates two parallel logs for a market session:

    - ``print_logs``: timestamped human-readable lines, optionally echoed
      to stdout when ``verbose_print`` is set;
    - ``logs``: structured dicts (one per event, each with a ``type`` key)
      suitable for JSON export.
    """

    def __init__(self, market, verbose_print=True, no_http_dup=False):
        # ``market`` may be None (e.g. the Portfolio class-level store);
        # add_log() then records None user_id/market_id.
        self.market = market
        self.verbose_print = verbose_print

        self.print_logs = []  # list of [datetime, formatted message]
        self.logs = []        # list of structured event dicts

        # When True, an HTTP response identical to the previous one (same
        # method, url and response text) is logged without repeating the body.
        self.no_http_dup = no_http_dup
        self.last_http = None

    def merge(self, other_report):
        """Merge another ReportStore's entries into this one, re-sorted by date."""
        self.logs += other_report.logs
        self.logs.sort(key=lambda x: x["date"])

        self.print_logs += other_report.print_logs
        self.print_logs.sort(key=lambda x: x[0])

    def print_log(self, message):
        """Record (and optionally print) a timestamped human-readable line."""
        now = datetime.datetime.now()
        message = "{:%Y-%m-%d %H:%M:%S}: {}".format(now, str(message))
        self.print_logs.append([now, message])
        if self.verbose_print:
            print(message)

    def add_log(self, hash_):
        """Stamp ``hash_`` with date/user_id/market_id, append it to ``logs``.

        Returns the (mutated) dict so callers can keep a reference to it
        (used by log_http_request for duplicate detection).
        """
        hash_["date"] = datetime.datetime.now()
        if self.market is not None:
            hash_["user_id"] = self.market.user_id
            hash_["market_id"] = self.market.market_id
        else:
            hash_["user_id"] = None
            hash_["market_id"] = None
        self.logs.append(hash_)
        return hash_

    @staticmethod
    def default_json_serial(obj):
        """JSON fallback serializer: ISO format for dates, str() for the rest."""
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        return str(obj)

    def to_json(self):
        """Dump all structured logs as one JSON array string."""
        return json.dumps(self.logs, default=self.default_json_serial, indent=" ")

    def to_json_array(self):
        """Yield ``(date, type, json_payload)`` per structured log.

        ``date`` and ``type`` are popped from a copy, so the payload carries
        only the remaining fields and ``self.logs`` is left untouched.
        """
        for log in (x.copy() for x in self.logs):
            yield (
                log.pop("date"),
                log.pop("type"),
                json.dumps(log, default=self.default_json_serial, indent=" ")
            )

    def set_verbose(self, verbose_print):
        """Toggle echoing of print_log() lines to stdout."""
        self.verbose_print = verbose_print

    def log_stage(self, stage, **kwargs):
        """Log the start of a named processing stage with its arguments."""
        def as_json(element):
            # Callables are logged as their source text; objects exposing
            # as_json() are serialized through it; anything else is kept as-is.
            if callable(element):
                return inspect.getsource(element).strip()
            elif hasattr(element, "as_json"):
                return element.as_json()
            else:
                return element

        args = { k: as_json(v) for k, v in kwargs.items() }
        args_str = ["{}={}".format(k, v) for k, v in args.items()]
        self.print_log("-" * (len(stage) + 8))
        self.print_log("[Stage] {} {}".format(stage, ", ".join(args_str)))

        self.add_log({
            "type": "stage",
            "stage": stage,
            "args": args,
            })

    def log_balances(self, tag=None):
        """Print and log the market's current balances, optionally tagged."""
        self.print_log("[Balance]")
        for currency, balance in self.market.balances.all.items():
            self.print_log("\t{}".format(balance))

        self.add_log({
            "type": "balance",
            "tag": tag,
            "balances": self.market.balances.as_json()
            })

    def log_tickers(self, amounts, other_currency,
            compute_value, type):
        """Log per-currency values and rates converted into ``other_currency``."""
        values = {}
        rates = {}
        if callable(compute_value):
            # Store the function's source rather than an unserializable object.
            compute_value = inspect.getsource(compute_value).strip()

        for currency, amount in amounts.items():
            values[currency] = amount.as_json()["value"]
            rates[currency] = amount.rate
        self.add_log({
            "type": "tickers",
            "compute_value": compute_value,
            "balance_type": type,
            "currency": other_currency,
            "balances": values,
            "rates": rates,
            "total": sum(amounts.values()).as_json()["value"]
            })

    def log_dispatch(self, amount, amounts, liquidity, repartition):
        """Log how ``amount`` was split across currencies by dispatch_assets()."""
        self.add_log({
            "type": "dispatch",
            "liquidity": liquidity,
            "repartition_ratio": repartition,
            "total_amount": amount.as_json(),
            "repartition": { k: v.as_json()["value"] for k, v in amounts.items() }
            })

    def log_trades(self, matching_and_trades, only):
        """Log computed trades; non-matching ones are marked ``skipped``."""
        trades = []
        for matching, trade in matching_and_trades:
            trade_json = trade.as_json()
            trade_json["skipped"] = not matching
            trades.append(trade_json)

        self.add_log({
            "type": "trades",
            "only": only,
            "debug": self.market.debug,
            "trades": trades
            })

    def log_orders(self, orders, tick=None, only=None, compute_value=None):
        """Log a batch of orders (None entries are dropped).

        Also prints the market's trades with their orders, not just the
        ``orders`` argument.
        """
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        self.print_log("[Orders]")
        self.market.trades.print_all_with_order(ind="\t")
        self.add_log({
            "type": "orders",
            "only": only,
            "compute_value": compute_value,
            "tick": tick,
            "orders": [order.as_json() for order in orders if order is not None]
            })

    def log_order(self, order, tick, finished=False, update=None,
            new_order=None, compute_value=None):
        """Log one order's lifecycle event at a given follow-up ``tick``.

        ``update`` is one of None, "waiting", "adjusting", "market_fallback"
        or "market_adjust"; ``finished`` takes precedence in the printed line.
        """
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        if finished:
            self.print_log("[Order] Finished {}".format(order))
        elif update == "waiting":
            self.print_log("[Order] {}, tick {}, waiting".format(order, tick))
        elif update == "adjusting":
            self.print_log("[Order] {}, tick {}, cancelling and adjusting to {}".format(order, tick, new_order))
        elif update == "market_fallback":
            self.print_log("[Order] {}, tick {}, fallbacking to market value".format(order, tick))
        elif update == "market_adjust":
            self.print_log("[Order] {}, tick {}, market value, cancelling and adjusting to {}".format(order, tick, new_order))

        self.add_log({
            "type": "order",
            "tick": tick,
            "update": update,
            "order": order.as_json(),
            "compute_value": compute_value,
            "new_order": new_order.as_json() if new_order is not None else None
            })

    def log_move_balances(self, needed, moving):
        """Log balances needed/moved between accounts; Amounts are flattened."""
        self.add_log({
            "type": "move_balances",
            "debug": self.market.debug,
            "needed": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in needed.items() },
            "moving": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in moving.items() },
            })

    def log_http_request(self, method, url, body, headers, response):
        """Log an HTTP exchange.

        Three cases: ``response`` is an Exception (status -1, error fields
        filled); the response duplicates the previous one and ``no_http_dup``
        is set (body replaced by a back-reference date); or a normal response,
        which becomes the new ``last_http`` reference.
        """
        if isinstance(response, Exception):
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": -1,
                "response": None,
                "error": response.__class__.__name__,
                "error_message": str(response),
                })
            self.last_http = None
        elif self.no_http_dup and \
                self.last_http is not None and \
                self.last_http["url"] == url and \
                self.last_http["method"] == method and \
                self.last_http["response"] == response.text:
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "response": None,
                "response_same_as": self.last_http["date"]
                })
        else:
            self.last_http = self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "response": response.text,
                "response_same_as": None,
                })

    def log_error(self, action, message=None, exception=None):
        """Print and log an error with optional message and exception details."""
        self.print_log("[Error] {}".format(action))
        if exception is not None:
            self.print_log(str("\t{}: {}".format(exception.__class__.__name__, exception)))
        if message is not None:
            self.print_log("\t{}".format(message))

        self.add_log({
            "type": "error",
            "action": action,
            "exception_class": exception.__class__.__name__ if exception is not None else None,
            "exception_message": str(exception) if exception is not None else None,
            "message": message,
            })

    def log_debug_action(self, action):
        """Print and log an action taken in debug (dry-run) mode."""
        self.print_log("[Debug] {}".format(action))

        self.add_log({
            "type": "debug_action",
            "action": action,
            })

    def log_market(self, args):
        """Log the market invocation arguments and the code revision.

        NOTE(review): "$Format:%H$" looks like a git export-subst placeholder
        replaced by the commit hash at archive time — confirm .gitattributes.
        """
        self.add_log({
            "type": "market",
            "commit": "$Format:%H$",
            "args": vars(args),
            })
258 | ||
class BalanceStore:
    """Per-currency balances of a market, keyed by currency code."""

    def __init__(self, market):
        self.market = market
        self.all = {}

    def currencies(self):
        """Return the currencies currently tracked (a dict keys view)."""
        return self.all.keys()

    def in_currency(self, other_currency, compute_value="average", type="total"):
        """Convert every tracked balance into ``other_currency``.

        ``type`` selects which balance attribute to convert (e.g. "total");
        the resulting amounts are logged via the market report and returned.
        """
        amounts = {}
        for currency, balance in self.all.items():
            converted = getattr(balance, type).in_currency(
                    other_currency, self.market, compute_value=compute_value)
            amounts[currency] = converted
        self.market.report.log_tickers(amounts, other_currency,
                compute_value, type)
        return amounts

    def fetch_balances(self, tag=None):
        """Refresh balances from the exchange and log them.

        A currency is (re)stored when it has a non-zero exchange or margin
        total, or when it was already tracked (so zeroed positions remain
        visible).
        """
        fetched = self.market.ccxt.fetch_all_balances()
        for currency, balance in fetched.items():
            already_tracked = currency in self.all
            if balance["exchange_total"] != 0 or \
                    balance["margin_total"] != 0 or already_tracked:
                self.all[currency] = portfolio.Balance(currency, balance)
        self.market.report.log_balances(tag=tag)

    def dispatch_assets(self, amount, liquidity="medium", repartition=None):
        """Split ``amount`` across currencies following a repartition table.

        ``repartition`` maps currency -> (ratio, trade_type); when omitted it
        is fetched from Portfolio for the given liquidity. "short" entries get
        a negated share. Every touched currency gets a Balance placeholder.
        """
        if repartition is None:
            repartition = Portfolio.repartition(liquidity=liquidity)
        total_ratio = sum(ratio for ratio, _ in repartition.values())
        amounts = {}
        for currency, (ratio, trade_type) in repartition.items():
            share = ratio * amount / total_ratio
            amounts[currency] = -share if trade_type == "short" else share
            self.all.setdefault(currency, portfolio.Balance(currency, {}))
        self.market.report.log_dispatch(amount, amounts, liquidity, repartition)
        return amounts

    def as_json(self):
        """Serialize every balance through its own as_json()."""
        return { currency: balance.as_json() for currency, balance in self.all.items() }
300 | ||
class TradeStore:
    """Holds the trades computed for a market and drives their orders."""

    def __init__(self, market):
        self.market = market
        self.all = []

    @property
    def pending(self):
        """Trades whose ``pending`` flag is still set, as a list."""
        return list(filter(lambda t: t.pending, self.all))

    def compute_trades(self, values_in_base, new_repartition, only=None):
        """Compute the trades moving ``values_in_base`` to ``new_repartition``.

        Both arguments map currency -> Amount expressed in the base currency.
        Trades whose action matches ``only`` (or all of them when ``only`` is
        None) are stored; every computed trade is logged either way.
        """
        computed_trades = []
        base_currency = sum(values_in_base.values()).currency
        for currency in self.market.balances.currencies():
            if currency == base_currency:
                continue
            value_from = values_in_base.get(currency, portfolio.Amount(base_currency, 0))
            value_to = new_repartition.get(currency, portfolio.Amount(base_currency, 0))

            if value_from.value * value_to.value < 0:
                # Long <-> short flip: close the current position down to
                # zero first, then open the opposite position from zero.
                computed_trades.append(self.trade_if_matching(
                    value_from, portfolio.Amount(base_currency, 0),
                    currency, only=only))
                computed_trades.append(self.trade_if_matching(
                    portfolio.Amount(base_currency, 0), value_to,
                    currency, only=only))
            else:
                computed_trades.append(self.trade_if_matching(
                    value_from, value_to,
                    currency, only=only))
        for matching, trade in computed_trades:
            if matching:
                self.all.append(trade)
        self.market.report.log_trades(computed_trades, only)

    def trade_if_matching(self, value_from, value_to, currency,
            only=None):
        """Build a Trade and flag whether its action matches ``only``.

        Returns ``[matching, trade]``.
        """
        trade = portfolio.Trade(value_from, value_to, currency,
                self.market)
        matching = only is None or trade.action == only
        return [matching, trade]

    def prepare_orders(self, only=None, compute_value="default"):
        """Prepare one order per pending trade, optionally filtered by action.

        Bug fix: ``only`` and ``compute_value`` were previously passed
        positionally to ``log_orders(orders, tick=None, only=None,
        compute_value=None)``, binding ``only`` to ``tick`` and
        ``compute_value`` to ``only``; pass them by keyword instead.
        """
        orders = []
        for trade in self.pending:
            if only is None or trade.action == only:
                orders.append(trade.prepare_order(compute_value=compute_value))
        self.market.report.log_orders(orders, only=only, compute_value=compute_value)

    def close_trades(self):
        """Close every stored trade."""
        for trade in self.all:
            trade.close()

    def print_all_with_order(self, ind=""):
        """Print each trade with its orders, every line prefixed by ``ind``."""
        for trade in self.all:
            trade.print_with_order(ind=ind)

    def run_orders(self):
        """Run all pending orders, then log the stage and the orders."""
        orders = self.all_orders(state="pending")
        for order in orders:
            order.run()
        self.market.report.log_stage("run_orders")
        self.market.report.log_orders(orders)

    def all_orders(self, state=None):
        """Flatten the orders of all trades, optionally filtered by status."""
        all_orders = sum(map(lambda v: v.orders, self.all), [])
        if state is None:
            return all_orders
        else:
            return list(filter(lambda o: o.status == state, all_orders))

    def update_all_orders_status(self):
        """Refresh the status of every currently open order."""
        for order in self.all_orders(state="open"):
            order.get_status()
374 | ||
class NoopLock:
    """Do-nothing stand-in for a lock, usable as a context manager."""

    def __enter__(self, *args):
        # Nothing to acquire.
        return None

    def __exit__(self, *args):
        # Returning a falsy value lets exceptions propagate normally.
        return None
380 | ||
class LockedVar:
    """A value guarded by a lock.

    Starts with a NoopLock (no threading overhead); call start_lock() to
    switch to a real lock before sharing the variable across threads.
    """

    def __init__(self, value):
        self.lock = NoopLock()
        self.val = value

    def start_lock(self):
        """Replace the no-op lock with a real threading.Lock."""
        import threading
        self.lock = threading.Lock()

    def set(self, value):
        """Replace the wrapped value under the lock."""
        with self.lock:
            self.val = value

    def get(self, key=None):
        """Return the wrapped value; with ``key``, do a dict lookup on it.

        The ``key`` path only applies when the wrapped value is a dict —
        otherwise the value itself is returned regardless of ``key``.
        """
        with self.lock:
            current = self.val
            if key is None or not isinstance(current, dict):
                return current
            return current.get(key)

    def __getattr__(self, key):
        # Unknown attributes proxy to the wrapped value, under the lock.
        with self.lock:
            return getattr(self.val, key)
404 | ||
class Portfolio:
    """Class-level singleton caching the cryptoportfolio JSON feed.

    All state lives in class attributes wrapped in LockedVar so an optional
    background worker thread can refresh the data while other threads read it.
    """
    URL = "https://cryptoportfolio.io/wp-content/uploads/portfolio/json/cryptoportfolio.json"
    # Raw parsed JSON feed (numbers as Decimal), or None when unfetched/invalid.
    data = LockedVar(None)
    # {"high": {date: repartition}, "medium": {date: repartition}}.
    liquidities = LockedVar({})
    # Most recent date present in either parsed portfolio.
    last_date = LockedVar(None)
    # Market-less report store; de-duplicates identical HTTP responses.
    report = LockedVar(ReportStore(None, no_http_dup=True))
    worker = None           # background threading.Thread, if started
    worker_started = False  # loop condition for wait_for_notification()
    worker_notify = None    # threading.Event: wake the worker
    callback = None         # threading.Event: worker signals fetch done

    @classmethod
    def start_worker(cls, poll=30):
        """Spawn the daemon fetch thread and switch shared state to real locks."""
        import threading

        cls.worker = threading.Thread(name="portfolio", daemon=True,
                target=cls.wait_for_notification, kwargs={"poll": poll})
        cls.worker_notify = threading.Event()
        cls.callback = threading.Event()

        # Upgrade the NoopLocks before the thread can touch shared state.
        cls.last_date.start_lock()
        cls.liquidities.start_lock()
        cls.report.start_lock()

        # Set the flag before start() so the worker loop condition holds.
        cls.worker_started = True
        cls.worker.start()

    @classmethod
    def is_worker_thread(cls):
        """True when called from the background worker thread itself."""
        if cls.worker is None:
            return False
        else:
            import threading
            return cls.worker == threading.current_thread()

    @classmethod
    def wait_for_notification(cls, poll=30):
        """Worker loop: on each notify, refetch the feed, then sleep ``poll``s.

        Must run on the worker thread only; raises RuntimeError otherwise.
        """
        if not cls.is_worker_thread():
            raise RuntimeError("This method needs to be ran with the worker")
        while cls.worker_started:
            cls.worker_notify.wait()
            # Re-check: stop_worker() sets the event just to unblock us.
            if cls.worker_started:
                cls.worker_notify.clear()
                cls.report.print_log("Fetching cryptoportfolio")
                cls.get_cryptoportfolio(refetch=True)
                cls.callback.set()
                time.sleep(poll)

    @classmethod
    def stop_worker(cls):
        """Ask the worker loop to exit and wake it up."""
        cls.worker_started = False
        cls.worker_notify.set()

    @classmethod
    def notify_and_wait(cls):
        """Trigger a fetch on the worker thread and block until it completes."""
        cls.callback.clear()
        cls.worker_notify.set()
        cls.callback.wait()

    @classmethod
    def wait_for_recent(cls, delta=4, poll=30):
        """Block until the feed's last date is within ``delta`` days of now.

        Sleeps ``poll`` seconds between attempts only when no worker thread
        exists (the worker path blocks in notify_and_wait instead).
        """
        cls.get_cryptoportfolio()
        while cls.last_date.get() is None or datetime.datetime.now() - cls.last_date.get() > datetime.timedelta(delta):
            if cls.worker is None:
                time.sleep(poll)
            cls.report.print_log("Attempt to fetch up-to-date cryptoportfolio")
            cls.get_cryptoportfolio(refetch=True)

    @classmethod
    def repartition(cls, liquidity="medium"):
        """Return the repartition table for ``liquidity`` at the latest date."""
        cls.get_cryptoportfolio()
        liquidities = cls.liquidities.get(liquidity)
        return liquidities[cls.last_date.get()]

    @classmethod
    def get_cryptoportfolio(cls, refetch=False):
        """Fetch and parse the feed, unless already cached and not ``refetch``.

        From a non-worker thread with a worker running, delegates the fetch
        to the worker. Network errors are logged and leave state unchanged;
        JSON decode errors reset data/last_date/liquidities.
        """
        if cls.data.get() is not None and not refetch:
            return
        if cls.worker is not None and not cls.is_worker_thread():
            cls.notify_and_wait()
            return
        try:
            r = requests.get(cls.URL)
            cls.report.log_http_request(r.request.method,
                    r.request.url, r.request.body, r.request.headers, r)
        except Exception as e:
            cls.report.log_error("get_cryptoportfolio", exception=e)
            return
        try:
            # Parse all numbers as Decimal to avoid float rounding on ratios.
            cls.data.set(r.json(parse_int=D, parse_float=D))
            cls.parse_cryptoportfolio()
        except (JSONDecodeError, SimpleJSONDecodeError):
            cls.data.set(None)
            cls.last_date.set(None)
            cls.liquidities.set({})

    @classmethod
    def parse_cryptoportfolio(cls):
        """Parse the raw feed into liquidities{} and last_date.

        The feed holds "portfolio_1" (high liquidity) and "portfolio_2"
        (medium), each with a "weights" table: "_row" lists dates, the other
        keys map a currency to per-date ratios. A trailing "s" on a currency
        key marks a short position.
        """
        def filter_weights(weight_hash):
            # Drop zero-ratio entries and the "_row" date column itself.
            if weight_hash[1][0] == 0:
                return False
            if weight_hash[0] == "_row":
                return False
            return True

        def clean_weights(i):
            # Build a mapper for column ``i``: "BTCs" -> ["BTC", (ratio, "short")],
            # "BTC" -> ["BTC", (ratio, "long")].
            def clean_weights_(h):
                if h[0].endswith("s"):
                    return [h[0][0:-1], (h[1][i], "short")]
                else:
                    return [h[0], (h[1][i], "long")]
            return clean_weights_

        def parse_weights(portfolio_hash):
            # Returns {date: {currency: (ratio, "long"|"short")}}.
            if "weights" not in portfolio_hash:
                return {}
            weights_hash = portfolio_hash["weights"]
            weights = {}
            for i in range(len(weights_hash["_row"])):
                date = datetime.datetime.strptime(weights_hash["_row"][i], "%Y-%m-%d")
                weights[date] = dict(filter(
                    filter_weights,
                    map(clean_weights(i), weights_hash.items())))
            return weights

        high_liquidity = parse_weights(cls.data.get("portfolio_1"))
        medium_liquidity = parse_weights(cls.data.get("portfolio_2"))

        cls.liquidities.set({
            "medium": medium_liquidity,
            "high": high_liquidity,
            })
        # datetime.datetime(1, 1, 1) as default keeps max() safe on empty tables.
        cls.last_date.set(max(
            max(medium_liquidity.keys(), default=datetime.datetime(1, 1, 1)),
            max(high_liquidity.keys(), default=datetime.datetime(1, 1, 1))
            ))
541 |