]>
Commit | Line | Data |
---|---|---|
1 | import time | |
2 | import requests | |
3 | import portfolio | |
4 | import simplejson as json | |
5 | from decimal import Decimal as D, ROUND_DOWN | |
6 | import datetime | |
7 | import inspect | |
8 | from json import JSONDecodeError | |
9 | from simplejson.errors import JSONDecodeError as SimpleJSONDecodeError | |
10 | import dbs | |
11 | ||
12 | __all__ = ["Portfolio", "BalanceStore", "ReportStore", "TradeStore"] | |
13 | ||
class ReportStore:
    """Collects structured log entries for a market (stages, balances,
    orders, HTTP requests, errors, ...) and can print them and serialize
    them to JSON for storage or redis publication."""

    def __init__(self, market, verbose_print=True, no_http_dup=False):
        self.market = market
        # When True, print_log echoes messages to stdout as well as storing them.
        self.verbose_print = verbose_print

        self.print_logs = []      # [timestamp, formatted message] pairs
        self.logs = []            # structured log dicts (see add_log)
        self.redis_status = []    # status dicts destined for redis (see add_redis_status)

        # When True, consecutive identical HTTP responses (same method, url
        # and body text) are logged without duplicating the response payload.
        self.no_http_dup = no_http_dup
        self.last_http = None

    def merge(self, other_report):
        """Absorb another report's logs, keeping both lists in
        chronological order."""
        self.logs += other_report.logs
        self.logs.sort(key=lambda x: x["date"])

        self.print_logs += other_report.print_logs
        self.print_logs.sort(key=lambda x: x[0])

    def print_log(self, message):
        """Timestamp a message, store it, and optionally print it."""
        now = datetime.datetime.now()
        message = "{:%Y-%m-%d %H:%M:%S}: {}".format(now, str(message))
        self.print_logs.append([now, message])
        if self.verbose_print:
            print(message)

    def add_log(self, hash_):
        """Stamp a structured log entry with date/user/market metadata,
        store it and return it (mutated in place)."""
        hash_["date"] = datetime.datetime.now()
        if self.market is not None:
            hash_["user_id"] = self.market.user_id
            hash_["market_id"] = self.market.market_id
        else:
            # Market-less reports (e.g. the Portfolio worker's) carry no ids.
            hash_["user_id"] = None
            hash_["market_id"] = None
        self.logs.append(hash_)
        return hash_

    def add_redis_status(self, hash_):
        """Queue a status entry for redis publication (see to_json_redis)."""
        self.redis_status.append(hash_)
        return hash_

    @staticmethod
    def default_json_serial(obj):
        """json.dumps fallback: ISO-format dates, str() everything else."""
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        return str(obj)

    def to_json(self):
        """Serialize all stored logs as a single JSON document."""
        return json.dumps(self.logs, default=self.default_json_serial, indent=" ")

    def to_json_array(self):
        """Yield (date, type, json_payload) tuples, one per log entry.
        Entries are copied so popping keys does not mutate self.logs."""
        for log in (x.copy() for x in self.logs):
            yield (
                log.pop("date"),
                log.pop("type"),
                json.dumps(log, default=self.default_json_serial, indent=" ")
            )

    def to_json_redis(self):
        """Yield (type, json_payload) tuples for the redis status entries."""
        for log in (x.copy() for x in self.redis_status):
            yield (
                log.pop("type"),
                json.dumps(log, default=self.default_json_serial)
            )

    def set_verbose(self, verbose_print):
        """Toggle stdout echoing of print_log messages."""
        self.verbose_print = verbose_print

    def log_stage(self, stage, **kwargs):
        """Log the start of a processing stage with its arguments.
        Callable arguments are serialized as their source code."""
        def as_json(element):
            if callable(element):
                return inspect.getsource(element).strip()
            elif hasattr(element, "as_json"):
                return element.as_json()
            else:
                return element

        args = { k: as_json(v) for k, v in kwargs.items() }
        args_str = ["{}={}".format(k, v) for k, v in args.items()]
        self.print_log("-" * (len(stage) + 8))
        self.print_log("[Stage] {} {}".format(stage, ", ".join(args_str)))

        self.add_log({
            "type": "stage",
            "stage": stage,
            "args": args,
        })

    def log_balances(self, tag=None, checkpoint=None, tickers=None,
            ticker_currency=None, compute_value=None, type=None):
        """Log the market's current balances, optionally with their ticker
        valuations. `type` shadows the builtin but is kept for interface
        compatibility with callers passing type=... keywords."""
        self.print_log("[Balance]")
        for currency, balance in self.market.balances.all.items():
            self.print_log("\t{}".format(balance))

        log = {
            "type": "balance",
            "tag": tag,
            "checkpoint": checkpoint,
            "balances": self.market.balances.as_json()
        }

        if tickers is not None:
            log["tickers"] = self._ticker_hash(tickers, ticker_currency,
                    compute_value, type)

        # A copy goes to the main log; the original is queued for redis.
        self.add_log(log.copy())
        self.add_redis_status(log)

    def log_tickers(self, amounts, other_currency,
            compute_value, type):
        """Log ticker valuations of `amounts` expressed in `other_currency`."""
        log = self._ticker_hash(amounts, other_currency, compute_value,
                type)
        log["type"] = "tickers"

        self.add_log(log)

    def _ticker_hash(self, amounts, other_currency, compute_value, type):
        """Build the dict describing per-currency valuations, rates and
        total. A callable compute_value is serialized as source code."""
        values = {}
        rates = {}
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()

        for currency, amount in amounts.items():
            values[currency] = amount.as_json()["value"]
            rates[currency] = amount.rate
        return {
            "compute_value": compute_value,
            "balance_type": type,
            "currency": other_currency,
            "balances": values,
            "rates": rates,
            "total": sum(amounts.values()).as_json()["value"]
        }

    def log_dispatch(self, amount, amounts, liquidity, repartition):
        """Log how a total amount was dispatched across currencies."""
        self.add_log({
            "type": "dispatch",
            "liquidity": liquidity,
            "repartition_ratio": repartition,
            "total_amount": amount.as_json(),
            "repartition": { k: v.as_json()["value"] for k, v in amounts.items() }
        })

    def log_trades(self, matching_and_trades, only):
        """Log computed trades; non-matching ones are flagged as skipped."""
        trades = []
        for matching, trade in matching_and_trades:
            trade_json = trade.as_json()
            trade_json["skipped"] = not matching
            trades.append(trade_json)

        self.add_log({
            "type": "trades",
            "only": only,
            "debug": self.market.debug,
            "trades": trades
        })

    def log_orders(self, orders, tick=None, only=None, compute_value=None):
        """Log a batch of prepared orders (None entries are dropped)."""
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        self.print_log("[Orders]")
        self.market.trades.print_all_with_order(ind="\t")
        self.add_log({
            "type": "orders",
            "only": only,
            "compute_value": compute_value,
            "tick": tick,
            "orders": [order.as_json() for order in orders if order is not None]
        })

    def log_order(self, order, tick, finished=False, update=None,
            new_order=None, compute_value=None):
        """Log the lifecycle of a single order at a given follow-up tick.
        `update` selects the console message; all states share one log entry."""
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        if finished:
            self.print_log("[Order] Finished {}".format(order))
        elif update == "waiting":
            self.print_log("[Order] {}, tick {}, waiting".format(order, tick))
        elif update == "adjusting":
            self.print_log("[Order] {}, tick {}, cancelling and adjusting to {}".format(order, tick, new_order))
        elif update == "market_fallback":
            self.print_log("[Order] {}, tick {}, fallbacking to market value".format(order, tick))
        elif update == "market_adjust":
            self.print_log("[Order] {}, tick {}, market value, cancelling and adjusting to {}".format(order, tick, new_order))

        self.add_log({
            "type": "order",
            "tick": tick,
            "update": update,
            "order": order.as_json(),
            "compute_value": compute_value,
            "new_order": new_order.as_json() if new_order is not None else None
        })

    def log_move_balances(self, needed, moving):
        """Log balances needed/moved between accounts; Amount values are
        reduced to their numeric value."""
        self.add_log({
            "type": "move_balances",
            "debug": self.market.debug,
            "needed": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in needed.items() },
            "moving": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in moving.items() },
        })

    def log_http_request(self, method, url, body, headers, response):
        """Log one HTTP exchange. Three cases:
        - `response` is an Exception: log the error, reset dup tracking;
        - dup suppression is on and the response repeats the previous one:
          log without the payload, referencing the earlier entry's date;
        - otherwise: log in full and remember it for dup detection."""
        if isinstance(response, Exception):
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": -1,
                "response": None,
                "error": response.__class__.__name__,
                "error_message": str(response),
            })
            self.last_http = None
        elif self.no_http_dup and \
                self.last_http is not None and \
                self.last_http["url"] == url and \
                self.last_http["method"] == method and \
                self.last_http["response"] == response.text:
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "duration": response.elapsed.total_seconds(),
                "response": None,
                "response_same_as": self.last_http["date"]
            })
        else:
            self.last_http = self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "duration": response.elapsed.total_seconds(),
                "response": response.text,
                "response_same_as": None,
            })

    def log_error(self, action, message=None, exception=None):
        """Log an error with optional message and exception details."""
        self.print_log("[Error] {}".format(action))
        if exception is not None:
            self.print_log(str("\t{}: {}".format(exception.__class__.__name__, exception)))
        if message is not None:
            self.print_log("\t{}".format(message))

        self.add_log({
            "type": "error",
            "action": action,
            "exception_class": exception.__class__.__name__ if exception is not None else None,
            "exception_message": str(exception) if exception is not None else None,
            "message": message,
        })

    def log_debug_action(self, action):
        """Log an action that was only simulated (debug mode)."""
        self.print_log("[Debug] {}".format(action))

        self.add_log({
            "type": "debug_action",
            "action": action,
        })

    def log_market(self, args):
        """Log the run's command-line arguments and the code revision.
        NOTE(review): "$Format:%H$" is presumably a git export-subst
        placeholder replaced by the commit hash at archive time — confirm.
        """
        self.add_log({
            "type": "market",
            "commit": "$Format:%H$",
            "args": vars(args),
        })
288 | ||
class BalanceStore:
    """Per-currency balance store for a market.

    Balances live in `self.all`, keyed by currency code.
    """

    def __init__(self, market):
        self.market = market
        self.all = {}

    def currencies(self):
        """Currency codes currently tracked."""
        return self.all.keys()

    def in_currency(self, other_currency, compute_value="average", type="total"):
        """Express every tracked balance in `other_currency` and log the
        resulting tickers. Returns a dict of currency -> converted amount."""
        amounts = {
            currency: getattr(balance, type).in_currency(
                other_currency, self.market, compute_value=compute_value)
            for currency, balance in self.all.items()
        }
        self.market.report.log_tickers(amounts, other_currency,
                compute_value, type)
        return amounts

    def fetch_balances(self, tag=None, add_portfolio=False,
            checkpoint=None, log_tickers=False, add_usdt=False,
            ticker_currency="BTC", ticker_compute_value="average", ticker_type="total"):
        """Refresh balances from the exchange and log them.

        Non-zero balances (or currencies already tracked) are stored;
        optionally seed empty balances for the portfolio repartition
        currencies and/or USDT, and optionally log ticker valuations.
        """
        fetched = self.market.ccxt.fetch_all_balances()
        for currency, balance in fetched.items():
            keep = balance["exchange_total"] != 0 \
                    or balance["margin_total"] != 0 \
                    or currency in self.all
            if keep:
                self.all[currency] = portfolio.Balance(currency, balance)
        if add_portfolio:
            for currency in Portfolio.repartition(from_cache=True):
                self.all.setdefault(currency, portfolio.Balance(currency, {}))
        if add_usdt:
            self.all.setdefault("USDT", portfolio.Balance("USDT", {}))
        if not log_tickers:
            self.market.report.log_balances(tag=tag, checkpoint=checkpoint)
        else:
            tickers = self.in_currency(ticker_currency,
                    compute_value=ticker_compute_value, type=ticker_type)
            self.market.report.log_balances(tag=tag, checkpoint=checkpoint,
                    tickers=tickers, ticker_currency=ticker_currency,
                    compute_value=ticker_compute_value, type=ticker_type)

    def dispatch_assets(self, amount, liquidity="medium", repartition=None):
        """Split `amount` across currencies according to a repartition
        ({currency: (ratio, "long"|"short")}); shorts get a negated share.
        Ensures every target currency has a Balance entry. Returns the
        per-currency amounts."""
        if repartition is None:
            repartition = Portfolio.repartition(liquidity=liquidity)
        sum_ratio = sum(ratio for ratio, _ in repartition.values())
        amounts = {}
        for currency, (ptt, trade_type) in repartition.items():
            share = ptt * amount / sum_ratio
            amounts[currency] = -share if trade_type == "short" else share
            self.all.setdefault(currency, portfolio.Balance(currency, {}))
        self.market.report.log_dispatch(amount, amounts, liquidity, repartition)
        return amounts

    def as_json(self):
        """JSON-ready dict of all balances."""
        return dict((currency, balance.as_json())
                for currency, balance in self.all.items())
343 | ||
class TradeStore:
    """Stores the trades computed for a market and drives their orders."""

    def __init__(self, market):
        self.market = market
        self.all = []

    @property
    def pending(self):
        """Trades that still have work to do."""
        return list(filter(lambda t: t.pending, self.all))

    def compute_trades(self, values_in_base, new_repartition, only=None):
        """Compute the trades needed to move the current holdings
        (`values_in_base`) to `new_repartition`, both expressed in the
        base currency. Trades matching `only` (or all of them when `only`
        is None) are stored in self.all; every computed trade is logged.
        """
        computed_trades = []
        base_currency = sum(values_in_base.values()).currency
        for currency in self.market.balances.currencies():
            if currency == base_currency:
                continue
            value_from = values_in_base.get(currency, portfolio.Amount(base_currency, 0))
            value_to = new_repartition.get(currency, portfolio.Amount(base_currency, 0))

            if value_from.value * value_to.value < 0:
                # The position changes sign (long <-> short): split into a
                # close-to-zero trade plus an opening trade.
                computed_trades.append(self.trade_if_matching(
                    value_from, portfolio.Amount(base_currency, 0),
                    currency, only=only))
                computed_trades.append(self.trade_if_matching(
                    portfolio.Amount(base_currency, 0), value_to,
                    currency, only=only))
            else:
                computed_trades.append(self.trade_if_matching(
                    value_from, value_to,
                    currency, only=only))
        for matching, trade in computed_trades:
            if matching:
                self.all.append(trade)
        self.market.report.log_trades(computed_trades, only)

    def trade_if_matching(self, value_from, value_to, currency,
            only=None):
        """Build a trade and report whether its action matches `only`.
        Returns [matching, trade]."""
        trade = portfolio.Trade(value_from, value_to, currency,
                self.market)
        matching = only is None or trade.action == only
        return [matching, trade]

    def prepare_orders(self, only=None, compute_value="default"):
        """Prepare an order for every pending trade matching `only`."""
        orders = []
        for trade in self.pending:
            if only is None or trade.action == only:
                orders.append(trade.prepare_order(compute_value=compute_value))
        # Bug fix: log_orders' signature is (orders, tick=None, only=None,
        # compute_value=None). The previous positional call passed `only`
        # as `tick` and `compute_value` as `only`; use keywords so each
        # value reaches the intended parameter.
        self.market.report.log_orders(orders, only=only,
                compute_value=compute_value)

    def close_trades(self):
        """Close every stored trade."""
        for trade in self.all:
            trade.close()

    def print_all_with_order(self, ind=""):
        """Print each trade together with its orders, indented by `ind`."""
        for trade in self.all:
            trade.print_with_order(ind=ind)

    def run_orders(self):
        """Run all pending orders, then log the stage and the orders."""
        orders = self.all_orders(state="pending")
        for order in orders:
            order.run()
        self.market.report.log_stage("run_orders")
        self.market.report.log_orders(orders)

    def all_orders(self, state=None):
        """Flatten every trade's orders; optionally keep only those whose
        status equals `state`."""
        all_orders = sum(map(lambda v: v.orders, self.all), [])
        if state is None:
            return all_orders
        else:
            return list(filter(lambda o: o.status == state, all_orders))

    def update_all_orders_status(self):
        """Refresh the status of every open order."""
        for order in self.all_orders(state="open"):
            order.get_status()
417 | ||
class NoopLock:
    """Lock look-alike that does nothing.

    Used in place of a real lock while no worker thread is running, so
    code can always write `with self.lock:` unconditionally.
    """

    def __enter__(self, *args):
        return None

    def __exit__(self, *args):
        # Returning a falsy value lets any exception propagate.
        return None
423 | ||
class LockedVar:
    """Value holder guarded by a lock.

    Starts with a NoopLock; call start_lock() to switch to a real
    threading.Lock once a worker thread exists. Attribute access on the
    instance is delegated to the wrapped value under the lock.
    """

    def __init__(self, value):
        self.lock = NoopLock()
        self.val = value

    def start_lock(self):
        """Replace the no-op lock with a real threading.Lock."""
        import threading
        self.lock = threading.Lock()

    def set(self, value):
        """Replace the wrapped value under the lock."""
        with self.lock:
            self.val = value

    def get(self, key=None):
        """Return the wrapped value; when it is a dict and `key` is given,
        return that entry (or None)."""
        with self.lock:
            current = self.val
            if key is None or not isinstance(current, dict):
                return current
            return current.get(key)

    def __getattr__(self, key):
        # Fall back to the wrapped value's attributes, still under the lock.
        with self.lock:
            return getattr(self.val, key)
447 | ||
class Portfolio:
    """Class-level singleton that fetches the cryptoportfolio.io
    repartition JSON, parses it into per-liquidity weight tables, caches
    it in redis, and optionally refreshes it from a worker thread."""

    URL = "https://cryptoportfolio.io/wp-content/uploads/portfolio/json/cryptoportfolio.json"
    # Raw parsed JSON payload; "" when restored from the redis cache,
    # None before any fetch.
    data = LockedVar(None)
    # {"medium"|"high": {date: {currency: (ratio, "long"|"short")}}}
    liquidities = LockedVar({})
    # Latest repartition date present in the data.
    last_date = LockedVar(None)
    report = LockedVar(ReportStore(None, no_http_dup=True))
    worker = None
    worker_tag = ""
    worker_started = False
    # threading.Event set by callers to wake the worker.
    worker_notify = None
    # threading.Event set by the worker when a fetch completed.
    callback = None

    @classmethod
    def start_worker(cls, poll=30):
        """Start the background fetch thread and switch the shared
        LockedVars to real locks."""
        import threading

        cls.worker = threading.Thread(name="portfolio", daemon=True,
                target=cls.wait_for_notification, kwargs={"poll": poll})
        cls.worker_notify = threading.Event()
        cls.callback = threading.Event()

        cls.last_date.start_lock()
        cls.liquidities.start_lock()
        cls.report.start_lock()

        cls.worker_tag = "[Worker] "
        cls.worker_started = True
        cls.worker.start()

    @classmethod
    def is_worker_thread(cls):
        """True when called from the worker thread itself."""
        if cls.worker is None:
            return False
        else:
            import threading
            return cls.worker == threading.current_thread()

    @classmethod
    def wait_for_notification(cls, poll=30):
        """Worker loop: on each notify, refetch the portfolio, signal
        completion via `callback`, then sleep `poll` seconds."""
        if not cls.is_worker_thread():
            raise RuntimeError("This method needs to be ran with the worker")
        while cls.worker_started:
            cls.worker_notify.wait()
            if cls.worker_started:
                cls.worker_notify.clear()
                cls.report.print_log("[Worker] Fetching cryptoportfolio")
                cls.get_cryptoportfolio(refetch=True)
                cls.callback.set()
                # Throttle: don't allow another fetch for `poll` seconds.
                time.sleep(poll)

    @classmethod
    def stop_worker(cls):
        """Ask the worker loop to exit and wake it up."""
        cls.worker_started = False
        cls.worker_notify.set()

    @classmethod
    def notify_and_wait(cls):
        """Wake the worker and block until its fetch completes."""
        cls.callback.clear()
        cls.worker_notify.set()
        cls.callback.wait()

    @classmethod
    def wait_for_recent(cls, delta=4, poll=30):
        """Refetch until the data is younger than `delta`.
        NOTE(review): timedelta(delta) makes `delta` a number of days —
        confirm that is the intended unit."""
        cls.get_cryptoportfolio()
        while cls.last_date.get() is None or datetime.datetime.now() - cls.last_date.get() > datetime.timedelta(delta):
            if cls.worker is None:
                # Without a worker get_cryptoportfolio fetches inline, so
                # pace the retries ourselves.
                time.sleep(poll)
            cls.report.print_log("Attempt to fetch up-to-date cryptoportfolio")
            cls.get_cryptoportfolio(refetch=True)

    @classmethod
    def repartition(cls, liquidity="medium", from_cache=False):
        """Return the latest repartition for the given liquidity:
        {currency: (ratio, "long"|"short")}."""
        if from_cache:
            cls.retrieve_cryptoportfolio()
        cls.get_cryptoportfolio()
        liquidities = cls.liquidities.get(liquidity)
        return liquidities[cls.last_date.get()]

    @classmethod
    def get_cryptoportfolio(cls, refetch=False):
        """Ensure the portfolio data is loaded. When a worker exists and
        we are not it, delegate the fetch to the worker; otherwise fetch
        inline, parse and store. On parse failure the cached state is
        reset so a later call retries."""
        if cls.data.get() is not None and not refetch:
            return
        if cls.worker is not None and not cls.is_worker_thread():
            cls.notify_and_wait()
            return
        try:
            r = requests.get(cls.URL)
            cls.report.log_http_request(r.request.method,
                    r.request.url, r.request.body, r.request.headers, r)
        except Exception as e:
            cls.report.log_error("{}get_cryptoportfolio".format(cls.worker_tag), exception=e)
            return
        try:
            # Parse numbers as Decimal to avoid float rounding on weights.
            cls.data.set(r.json(parse_int=D, parse_float=D))
            cls.parse_cryptoportfolio()
            cls.store_cryptoportfolio()
        except (JSONDecodeError, SimpleJSONDecodeError):
            cls.data.set(None)
            cls.last_date.set(None)
            cls.liquidities.set({})

    @classmethod
    def retrieve_cryptoportfolio(cls):
        """Restore the latest repartition from the redis cache, if both
        the repartition and its date are present."""
        if dbs.redis_connected():
            repartition = dbs.redis.get("/cryptoportfolio/repartition/latest")
            date = dbs.redis.get("/cryptoportfolio/repartition/date")
            if date is not None and repartition is not None:
                date = datetime.datetime.strptime(date.decode(), "%Y-%m-%d")
                repartition = json.loads(repartition, parse_int=D, parse_float=D)
                # Rebuild the {liquidity: {date: weights}} shape used by
                # parse_cryptoportfolio.
                repartition = { k: { date: v } for k, v in repartition.items() }

                # "" marks the data as present-but-cache-sourced, so
                # get_cryptoportfolio won't refetch.
                cls.data.set("")
                cls.last_date.set(date)
                cls.liquidities.set(repartition)

    @classmethod
    def store_cryptoportfolio(cls):
        """Store the latest repartition and its date in redis.
        Attribute access on the LockedVars (items(), date()) is delegated
        to the wrapped dict/datetime via LockedVar.__getattr__."""
        if dbs.redis_connected():
            hash_ = {}
            for liquidity, repartitions in cls.liquidities.items():
                hash_[liquidity] = repartitions[cls.last_date.get()]
            dump = json.dumps(hash_)
            key = "/cryptoportfolio/repartition/latest"
            dbs.redis.set(key, dump)
            key = "/cryptoportfolio/repartition/date"
            dbs.redis.set(key, cls.last_date.date().isoformat())

    @classmethod
    def parse_cryptoportfolio(cls):
        """Turn the raw payload into per-liquidity weight tables keyed by
        date, and record the most recent date seen."""
        def filter_weights(weight_hash):
            # Drop zero weights and the "_row" date column itself.
            if weight_hash[1][0] == 0:
                return False
            if weight_hash[0] == "_row":
                return False
            return True

        def clean_weights(i):
            # A trailing "s" on the currency code marks a short position.
            def clean_weights_(h):
                if h[0].endswith("s"):
                    return [h[0][0:-1], (h[1][i], "short")]
                else:
                    return [h[0], (h[1][i], "long")]
            return clean_weights_

        def parse_weights(portfolio_hash):
            # "weights" maps column names to per-date value lists, with
            # "_row" holding the date strings.
            if "weights" not in portfolio_hash:
                return {}
            weights_hash = portfolio_hash["weights"]
            weights = {}
            for i in range(len(weights_hash["_row"])):
                date = datetime.datetime.strptime(weights_hash["_row"][i], "%Y-%m-%d")
                weights[date] = dict(filter(
                    filter_weights,
                    map(clean_weights(i), weights_hash.items())))
            return weights

        # data is a dict here, so LockedVar.get(key) returns the entry.
        high_liquidity = parse_weights(cls.data.get("portfolio_1"))
        medium_liquidity = parse_weights(cls.data.get("portfolio_2"))

        cls.liquidities.set({
            "medium": medium_liquidity,
            "high": high_liquidity,
        })
        # datetime(1,1,1) is the sentinel for "no dates at all".
        cls.last_date.set(max(
            max(medium_liquidity.keys(), default=datetime.datetime(1, 1, 1)),
            max(high_liquidity.keys(), default=datetime.datetime(1, 1, 1))
        ))
615 |