# Store classes for a market session: report, balance and trade stores,
# plus the cryptoportfolio.io Portfolio fetcher.
1 | import time | |
2 | import requests | |
3 | import portfolio | |
4 | import simplejson as json | |
5 | from decimal import Decimal as D, ROUND_DOWN | |
6 | import datetime | |
7 | import inspect | |
8 | from json import JSONDecodeError | |
9 | from simplejson.errors import JSONDecodeError as SimpleJSONDecodeError | |
10 | import dbs | |
11 | ||
12 | __all__ = ["Portfolio", "BalanceStore", "ReportStore", "TradeStore"] | |
13 | ||
class ReportStore:
    """Accumulate session logs for a market.

    Three buffers are kept:
    - ``print_logs``: timestamped human-readable lines (optionally echoed
      to stdout),
    - ``logs``: structured dict entries, one per event, tagged with date,
      user_id and market_id by :meth:`add_log`,
    - ``redis_status``: the subset of entries destined for redis status
      reporting.
    """

    def __init__(self, market, verbose_print=True, no_http_dup=False):
        """
        market -- market object providing ``user_id``/``market_id`` used to
                  tag every structured entry; may be None.
        verbose_print -- when True, print_log() also echoes to stdout.
        no_http_dup -- when True, an HTTP response identical to the previous
                       one (same method, url and response text) is logged
                       without repeating the response body.
        """
        self.market = market
        self.verbose_print = verbose_print

        self.print_logs = []     # [datetime, formatted message] pairs
        self.logs = []           # structured log entries (dicts)
        self.redis_status = []   # entries queued for redis

        self.no_http_dup = no_http_dup
        self.last_http = None    # last fully-logged HTTP entry, for dedup

    def merge(self, other_report):
        """Absorb another report's entries, keeping both buffers date-sorted."""
        self.logs += other_report.logs
        self.logs.sort(key=lambda x: x["date"])

        self.print_logs += other_report.print_logs
        self.print_logs.sort(key=lambda x: x[0])

    def print_log(self, message):
        """Record (and optionally print) a timestamped human-readable line."""
        now = datetime.datetime.now()
        message = "{:%Y-%m-%d %H:%M:%S}: {}".format(now, str(message))
        self.print_logs.append([now, message])
        if self.verbose_print:
            print(message)

    def add_log(self, hash_):
        """Tag `hash_` with date/user/market and append it to ``logs``.

        The dict is mutated in place and returned (callers rely on the
        return value, e.g. log_http_request keeps it as ``last_http``).
        """
        hash_["date"] = datetime.datetime.now()
        if self.market is not None:
            hash_["user_id"] = self.market.user_id
            hash_["market_id"] = self.market.market_id
        else:
            hash_["user_id"] = None
            hash_["market_id"] = None
        self.logs.append(hash_)
        return hash_

    def add_redis_status(self, hash_):
        """Queue an entry for redis status reporting; returns it unchanged."""
        self.redis_status.append(hash_)
        return hash_

    @staticmethod
    def default_json_serial(obj):
        """json.dumps fallback: ISO format for dates, str() for anything else."""
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        return str(obj)

    def to_json(self):
        """Dump all structured entries as one JSON array."""
        return json.dumps(self.logs, default=self.default_json_serial, indent=" ")

    def to_json_array(self):
        """Yield (date, type, json_payload) per structured entry.

        ``date`` and ``type`` are popped from a copy, so the payload holds
        the remaining fields and the stored entries stay untouched.
        """
        for log in (x.copy() for x in self.logs):
            yield (
                log.pop("date"),
                log.pop("type"),
                json.dumps(log, default=self.default_json_serial, indent=" ")
            )

    def to_json_redis(self):
        """Yield (type, json_payload) per redis status entry."""
        for log in (x.copy() for x in self.redis_status):
            yield (
                log.pop("type"),
                json.dumps(log, default=self.default_json_serial)
            )

    def set_verbose(self, verbose_print):
        """Toggle stdout echoing of print_log()."""
        self.verbose_print = verbose_print

    def log_stage(self, stage, **kwargs):
        """Log the start of a named processing stage with its arguments."""
        def as_json(element):
            # Callables are logged as their source code; objects exposing
            # as_json() are serialized through it; everything else is kept.
            if callable(element):
                return inspect.getsource(element).strip()
            elif hasattr(element, "as_json"):
                return element.as_json()
            else:
                return element

        args = { k: as_json(v) for k, v in kwargs.items() }
        args_str = ["{}={}".format(k, v) for k, v in args.items()]
        self.print_log("-" * (len(stage) + 8))
        self.print_log("[Stage] {} {}".format(stage, ", ".join(args_str)))

        self.add_log({
            "type": "stage",
            "stage": stage,
            "args": args,
        })

    def log_balances(self, tag=None, checkpoint=None, tickers=None,
            ticker_currency=None, compute_value=None, type=None):
        """Log the market's current balances, optionally with ticker values.

        ``type`` shadows the builtin; kept for API compatibility with
        callers passing it by keyword. The entry goes both to ``logs`` and
        to the redis status queue.
        """
        self.print_log("[Balance]")
        for currency, balance in self.market.balances.all.items():
            self.print_log("\t{}".format(balance))

        log = {
            "type": "balance",
            "tag": tag,
            "checkpoint": checkpoint,
            "balances": self.market.balances.as_json()
        }

        if tickers is not None:
            log["tickers"] = self._ticker_hash(tickers, ticker_currency,
                    compute_value, type)

        # add_log mutates its argument (adds date/user/market), so give it
        # a copy and keep the pristine dict for redis.
        self.add_log(log.copy())
        self.add_redis_status(log)

    def log_tickers(self, amounts, other_currency,
            compute_value, type):
        """Log converted balance amounts as a "tickers" entry."""
        log = self._ticker_hash(amounts, other_currency, compute_value,
                type)
        log["type"] = "tickers"

        self.add_log(log)

    def _ticker_hash(self, amounts, other_currency, compute_value, type):
        """Build the serializable dict shared by balance/ticker entries."""
        values = {}
        rates = {}
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()

        for currency, amount in amounts.items():
            values[currency] = amount.as_json()["value"]
            rates[currency] = amount.rate
        return {
            "compute_value": compute_value,
            "balance_type": type,
            "currency": other_currency,
            "balances": values,
            "rates": rates,
            "total": sum(amounts.values()).as_json()["value"]
        }

    def log_dispatch(self, amount, amounts, liquidity, repartition):
        """Log how a total amount was split across currencies."""
        self.add_log({
            "type": "dispatch",
            "liquidity": liquidity,
            "repartition_ratio": repartition,
            "total_amount": amount.as_json(),
            "repartition": { k: v.as_json()["value"] for k, v in amounts.items() }
        })

    def log_trades(self, matching_and_trades, only):
        """Log computed trades; non-matching ones are marked "skipped"."""
        trades = []
        for matching, trade in matching_and_trades:
            trade_json = trade.as_json()
            trade_json["skipped"] = not matching
            trades.append(trade_json)

        self.add_log({
            "type": "trades",
            "only": only,
            "debug": self.market.debug,
            "trades": trades
        })

    def log_orders(self, orders, tick=None, only=None, compute_value=None):
        """Log a batch of prepared orders (None entries are dropped)."""
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        self.print_log("[Orders]")
        self.market.trades.print_all_with_order(ind="\t")
        self.add_log({
            "type": "orders",
            "only": only,
            "compute_value": compute_value,
            "tick": tick,
            "orders": [order.as_json() for order in orders if order is not None]
        })

    def log_order(self, order, tick, finished=False, update=None,
            new_order=None, compute_value=None):
        """Log one order's follow-up state at a given tick.

        ``update`` is one of None/"waiting"/"adjusting"/"market_fallback"/
        "market_adjust" (as emitted by the order follow-up loop).
        """
        if callable(compute_value):
            compute_value = inspect.getsource(compute_value).strip()
        if finished:
            self.print_log("[Order] Finished {}".format(order))
        elif update == "waiting":
            self.print_log("[Order] {}, tick {}, waiting".format(order, tick))
        elif update == "adjusting":
            self.print_log("[Order] {}, tick {}, cancelling and adjusting to {}".format(order, tick, new_order))
        elif update == "market_fallback":
            self.print_log("[Order] {}, tick {}, fallbacking to market value".format(order, tick))
        elif update == "market_adjust":
            self.print_log("[Order] {}, tick {}, market value, cancelling and adjusting to {}".format(order, tick, new_order))

        self.add_log({
            "type": "order",
            "tick": tick,
            "update": update,
            "order": order.as_json(),
            "compute_value": compute_value,
            "new_order": new_order.as_json() if new_order is not None else None
        })

    def log_move_balances(self, needed, moving):
        """Log balance moves; Amount values are reduced to their "value"."""
        self.add_log({
            "type": "move_balances",
            "debug": self.market.debug,
            "needed": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in needed.items() },
            "moving": { k: v.as_json()["value"] if isinstance(v, portfolio.Amount) else v for k, v in moving.items() },
        })

    def log_http_request(self, method, url, body, headers, response):
        """Log an HTTP exchange.

        Three cases:
        - `response` is an Exception: logged with status -1 and the error,
          and the dedup state is reset;
        - dedup enabled and the response text matches the previous entry for
          the same method/url: logged without the body, pointing at the
          previous entry's date via "response_same_as";
        - otherwise: fully logged and remembered for dedup.
        """
        if isinstance(response, Exception):
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": -1,
                "response": None,
                "error": response.__class__.__name__,
                "error_message": str(response),
            })
            self.last_http = None
        elif self.no_http_dup and \
                self.last_http is not None and \
                self.last_http["url"] == url and \
                self.last_http["method"] == method and \
                self.last_http["response"] == response.text:
            self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "duration": response.elapsed.total_seconds(),
                "response": None,
                "response_same_as": self.last_http["date"]
            })
        else:
            self.last_http = self.add_log({
                "type": "http_request",
                "method": method,
                "url": url,
                "body": body,
                "headers": headers,
                "status": response.status_code,
                "duration": response.elapsed.total_seconds(),
                "response": response.text,
                "response_same_as": None,
            })

    def log_error(self, action, message=None, exception=None):
        """Log an error with optional message and exception details."""
        self.print_log("[Error] {}".format(action))
        if exception is not None:
            self.print_log(str("\t{}: {}".format(exception.__class__.__name__, exception)))
        if message is not None:
            self.print_log("\t{}".format(message))

        self.add_log({
            "type": "error",
            "action": action,
            "exception_class": exception.__class__.__name__ if exception is not None else None,
            "exception_message": str(exception) if exception is not None else None,
            "message": message,
        })

    def log_debug_action(self, action):
        """Log an action that was only simulated (debug mode)."""
        self.print_log("[Debug] {}".format(action))

        self.add_log({
            "type": "debug_action",
            "action": action,
        })

    def log_market(self, args):
        """Log the session's command-line arguments and code revision."""
        self.add_log({
            "type": "market",
            # "$Format:%H$" is a git export-subst placeholder, replaced by
            # the commit hash when the source is exported via `git archive`.
            "commit": "$Format:%H$",
            "args": vars(args),
        })
288 | ||
class BalanceStore:
    """Per-currency cache of Balance objects for a market."""

    def __init__(self, market):
        self.market = market
        self.all = {}  # currency -> portfolio.Balance

    def currencies(self):
        """Return a view over the currencies currently tracked."""
        return self.all.keys()

    def in_currency(self, other_currency, compute_value="average", type="total"):
        """Convert every tracked balance into `other_currency`.

        The chosen balance component (``type`` attribute of each Balance)
        is converted via the market's tickers; the result is logged and
        returned as a {currency: Amount} dict.
        """
        amounts = {
            currency: getattr(balance, type).in_currency(
                other_currency, self.market, compute_value=compute_value)
            for currency, balance in self.all.items()
        }
        self.market.report.log_tickers(amounts, other_currency,
                compute_value, type)
        return amounts

    def fetch_balances(self, tag=None, add_portfolio=False,
            checkpoint=None, log_tickers=False,
            ticker_currency="BTC", ticker_compute_value="average", ticker_type="total"):
        """Refresh balances from the exchange and log the result.

        Only non-zero balances (or currencies already tracked) are kept.
        With add_portfolio, currencies from the cached repartition are added
        with empty balances. With log_tickers, converted values are logged
        alongside the balances.
        """
        fetched = self.market.ccxt.fetch_all_balances()
        for currency, balance in fetched.items():
            relevant = (balance["exchange_total"] != 0
                    or balance["margin_total"] != 0
                    or currency in self.all)
            if relevant:
                self.all[currency] = portfolio.Balance(currency, balance)
        if add_portfolio:
            for currency in Portfolio.repartition(from_cache=True):
                self.all.setdefault(currency, portfolio.Balance(currency, {}))
        if not log_tickers:
            self.market.report.log_balances(tag=tag, checkpoint=checkpoint)
            return
        tickers = self.in_currency(ticker_currency,
                compute_value=ticker_compute_value, type=ticker_type)
        self.market.report.log_balances(tag=tag, checkpoint=checkpoint,
                tickers=tickers, ticker_currency=ticker_currency,
                compute_value=ticker_compute_value, type=ticker_type)

    def dispatch_assets(self, amount, liquidity="medium", repartition=None):
        """Split `amount` across currencies following a repartition.

        repartition maps currency -> (ratio, "long"/"short"); short entries
        get a negated share. Currencies in the repartition are added to the
        store with empty balances if missing. Returns {currency: share}.
        """
        if repartition is None:
            repartition = Portfolio.repartition(liquidity=liquidity)
        total_ratio = sum(ratio for ratio, _ in repartition.values())
        amounts = {}
        for currency, (ratio, trade_type) in repartition.items():
            share = ratio * amount / total_ratio
            amounts[currency] = -share if trade_type == "short" else share
            self.all.setdefault(currency, portfolio.Balance(currency, {}))
        self.market.report.log_dispatch(amount, amounts, liquidity, repartition)
        return amounts

    def as_json(self):
        """Serialize every tracked balance."""
        return { currency: balance.as_json() for currency, balance in self.all.items() }
341 | ||
class TradeStore:
    """Hold the trades computed for a market and manage their orders."""

    def __init__(self, market):
        """
        market -- market object providing balances, report and trade context.
        """
        self.market = market
        self.all = []  # every Trade accepted by compute_trades, in order

    @property
    def pending(self):
        """Trades whose ``pending`` flag is still set."""
        return [trade for trade in self.all if trade.pending]

    def compute_trades(self, values_in_base, new_repartition, only=None):
        """Build the trades moving current values to the new repartition.

        values_in_base -- {currency: Amount} current holdings, in base currency
        new_repartition -- {currency: Amount} target holdings, in base currency
        only -- if not None, only trades with this action are kept

        A position whose sign flips (long <-> short) is split into two
        trades: one closing it to zero, then one opening the opposite side.
        Matching trades are appended to self.all; every computed trade
        (matching or not) is reported.
        """
        computed_trades = []
        base_currency = sum(values_in_base.values()).currency
        for currency in self.market.balances.currencies():
            if currency == base_currency:
                continue
            value_from = values_in_base.get(currency, portfolio.Amount(base_currency, 0))
            value_to = new_repartition.get(currency, portfolio.Amount(base_currency, 0))

            if value_from.value * value_to.value < 0:
                # Sign flip: close the existing position first, then open
                # the opposite one as a separate trade.
                computed_trades.append(self.trade_if_matching(
                    value_from, portfolio.Amount(base_currency, 0),
                    currency, only=only))
                computed_trades.append(self.trade_if_matching(
                    portfolio.Amount(base_currency, 0), value_to,
                    currency, only=only))
            else:
                computed_trades.append(self.trade_if_matching(
                    value_from, value_to,
                    currency, only=only))
        for matching, trade in computed_trades:
            if matching:
                self.all.append(trade)
        self.market.report.log_trades(computed_trades, only)

    def trade_if_matching(self, value_from, value_to, currency,
            only=None):
        """Build a Trade and flag whether it matches the `only` filter.

        Returns [matching, trade].
        """
        trade = portfolio.Trade(value_from, value_to, currency,
                self.market)
        matching = only is None or trade.action == only
        return [matching, trade]

    def prepare_orders(self, only=None, compute_value="default"):
        """Prepare one order per pending trade (filtered by `only`) and log them."""
        orders = []
        for trade in self.pending:
            if only is None or trade.action == only:
                orders.append(trade.prepare_order(compute_value=compute_value))
        # Bug fix: this used to be the positional call
        # log_orders(orders, only, compute_value), but log_orders' second
        # positional parameter is `tick`, so `only` was logged as the tick
        # and `compute_value` as `only`. Keywords restore the intended fields.
        self.market.report.log_orders(orders, only=only,
                compute_value=compute_value)

    def close_trades(self):
        """Close every trade in the store."""
        for trade in self.all:
            trade.close()

    def print_all_with_order(self, ind=""):
        """Print each trade with its orders, each line prefixed by `ind`."""
        for trade in self.all:
            trade.print_with_order(ind=ind)

    def run_orders(self):
        """Run all pending orders, then log the stage and the orders."""
        orders = self.all_orders(state="pending")
        for order in orders:
            order.run()
        self.market.report.log_stage("run_orders")
        self.market.report.log_orders(orders)

    def all_orders(self, state=None):
        """Flatten the orders of every trade; optionally filter by status."""
        all_orders = [order for trade in self.all for order in trade.orders]
        if state is None:
            return all_orders
        return [order for order in all_orders if order.status == state]

    def update_all_orders_status(self):
        """Refresh the status of every currently open order."""
        for order in self.all_orders(state="open"):
            order.get_status()
415 | ||
class NoopLock:
    """Context manager with lock syntax but no locking.

    Stands in for threading.Lock when no worker thread is running
    (see LockedVar); entering and exiting are no-ops.
    """

    def __enter__(self, *args):
        return None

    def __exit__(self, *args):
        return None
421 | ||
class LockedVar:
    """A value guarded by a lock that starts out as a no-op.

    Until start_lock() swaps in a real threading.Lock, all guarded
    sections go through NoopLock and cost nothing. Unknown attribute
    accesses are proxied to the wrapped value under the lock.
    """

    def __init__(self, value):
        self.lock = NoopLock()
        self.val = value

    def start_lock(self):
        """Switch to a real lock; call before sharing across threads."""
        import threading
        self.lock = threading.Lock()

    def set(self, value):
        """Replace the wrapped value under the lock."""
        with self.lock:
            self.val = value

    def get(self, key=None):
        """Return the wrapped value, or val.get(key) when it is a dict."""
        with self.lock:
            if key is None or not isinstance(self.val, dict):
                return self.val
            return self.val.get(key)

    def __getattr__(self, key):
        # Only called for names not found on the instance/class: forward
        # them to the wrapped value while holding the lock.
        with self.lock:
            return getattr(self.val, key)
445 | ||
class Portfolio:
    """Fetch, cache and parse cryptoportfolio.io repartition data.

    All state is class-level. Data can be refreshed inline or by a
    dedicated worker thread (start_worker), in which case the shared
    LockedVar attributes are switched to real locks.
    """
    URL = "https://cryptoportfolio.io/wp-content/uploads/portfolio/json/cryptoportfolio.json"
    # Shared state, wrapped in LockedVar for optional thread-safety.
    # NOTE(review): start_worker() starts locks for last_date, liquidities
    # and report but not for data — confirm that is intentional.
    data = LockedVar(None)        # raw parsed JSON payload ("" when loaded from redis cache)
    liquidities = LockedVar({})   # {"high"/"medium": {date: {currency: (ratio, "long"/"short")}}}
    last_date = LockedVar(None)   # most recent repartition date seen
    report = LockedVar(ReportStore(None, no_http_dup=True))
    worker = None          # background fetcher thread, once started
    worker_tag = ""        # log prefix while the worker is active
    worker_started = False
    worker_notify = None   # Event: signals the worker to refetch
    callback = None        # Event: worker signals fetch completion

    @classmethod
    def start_worker(cls, poll=30):
        """Spawn the daemon fetcher thread and arm the shared locks."""
        import threading

        cls.worker = threading.Thread(name="portfolio", daemon=True,
                target=cls.wait_for_notification, kwargs={"poll": poll})
        cls.worker_notify = threading.Event()
        cls.callback = threading.Event()

        cls.last_date.start_lock()
        cls.liquidities.start_lock()
        cls.report.start_lock()

        cls.worker_tag = "[Worker] "
        cls.worker_started = True
        cls.worker.start()

    @classmethod
    def is_worker_thread(cls):
        """True when called from the worker thread itself."""
        if cls.worker is None:
            return False
        else:
            import threading
            return cls.worker == threading.current_thread()

    @classmethod
    def wait_for_notification(cls, poll=30):
        """Worker loop: block until notified, refetch, signal completion.

        Sleeps `poll` seconds after each fetch, so notifications arriving
        meanwhile are coalesced into one refetch.
        """
        if not cls.is_worker_thread():
            raise RuntimeError("This method needs to be ran with the worker")
        while cls.worker_started:
            cls.worker_notify.wait()
            if cls.worker_started:
                cls.worker_notify.clear()
                cls.report.print_log("[Worker] Fetching cryptoportfolio")
                cls.get_cryptoportfolio(refetch=True)
                cls.callback.set()
                time.sleep(poll)

    @classmethod
    def stop_worker(cls):
        """Ask the worker loop to exit and wake it up."""
        cls.worker_started = False
        cls.worker_notify.set()

    @classmethod
    def notify_and_wait(cls):
        """Trigger a worker refetch and block until it completes."""
        cls.callback.clear()
        cls.worker_notify.set()
        cls.callback.wait()

    @classmethod
    def wait_for_recent(cls, delta=4, poll=30):
        """Refetch until the data is younger than `delta` days.

        NOTE(review): the `poll` sleep only happens when no worker thread
        exists; with a worker, pacing relies on notify_and_wait blocking
        plus the worker's own sleep — confirm this is the intended backoff.
        """
        cls.get_cryptoportfolio()
        while cls.last_date.get() is None or datetime.datetime.now() - cls.last_date.get() > datetime.timedelta(delta):
            if cls.worker is None:
                time.sleep(poll)
            cls.report.print_log("Attempt to fetch up-to-date cryptoportfolio")
            cls.get_cryptoportfolio(refetch=True)

    @classmethod
    def repartition(cls, liquidity="medium", from_cache=False):
        """Return the latest repartition for the given liquidity level.

        With from_cache, the redis-cached repartition is loaded first
        (which marks the data present and skips the HTTP fetch).
        """
        if from_cache:
            cls.retrieve_cryptoportfolio()
        cls.get_cryptoportfolio()
        liquidities = cls.liquidities.get(liquidity)
        return liquidities[cls.last_date.get()]

    @classmethod
    def get_cryptoportfolio(cls, refetch=False):
        """Fetch and parse the portfolio JSON unless already cached.

        From a non-worker thread while a worker exists, the fetch is
        delegated to the worker. On JSON decode failure all cached state
        is reset; on HTTP failure the error is logged and state kept.
        """
        if cls.data.get() is not None and not refetch:
            return
        if cls.worker is not None and not cls.is_worker_thread():
            cls.notify_and_wait()
            return
        try:
            r = requests.get(cls.URL)
            cls.report.log_http_request(r.request.method,
                    r.request.url, r.request.body, r.request.headers, r)
        except Exception as e:
            cls.report.log_error("{}get_cryptoportfolio".format(cls.worker_tag), exception=e)
            return
        try:
            # Decimal parsing keeps ratio precision end to end.
            cls.data.set(r.json(parse_int=D, parse_float=D))
            cls.parse_cryptoportfolio()
            cls.store_cryptoportfolio()
        except (JSONDecodeError, SimpleJSONDecodeError):
            cls.data.set(None)
            cls.last_date.set(None)
            cls.liquidities.set({})

    @classmethod
    def retrieve_cryptoportfolio(cls):
        """Load the latest repartition and its date from redis, if present.

        Sets data to "" (non-None sentinel) so get_cryptoportfolio treats
        the cache as already fetched. The stored repartition has no
        liquidity level, so the same {date: repartition} dict is installed
        under every key found in the cached hash.
        """
        if dbs.redis_connected():
            repartition = dbs.redis.get("/cryptoportfolio/repartition/latest")
            date = dbs.redis.get("/cryptoportfolio/repartition/date")
            if date is not None and repartition is not None:
                date = datetime.datetime.strptime(date.decode(), "%Y-%m-%d")
                repartition = json.loads(repartition, parse_int=D, parse_float=D)
                repartition = { k: { date: v } for k, v in repartition.items() }

                cls.data.set("")
                cls.last_date.set(date)
                cls.liquidities.set(repartition)

    @classmethod
    def store_cryptoportfolio(cls):
        """Store the latest repartition per liquidity level in redis.

        Attribute access on the LockedVars (items(), date()) is proxied to
        the wrapped values via LockedVar.__getattr__.
        """
        if dbs.redis_connected():
            hash_ = {}
            for liquidity, repartitions in cls.liquidities.items():
                hash_[liquidity] = repartitions[cls.last_date.get()]
            dump = json.dumps(hash_)
            key = "/cryptoportfolio/repartition/latest"
            dbs.redis.set(key, dump)
            key = "/cryptoportfolio/repartition/date"
            dbs.redis.set(key, cls.last_date.date().isoformat())

    @classmethod
    def parse_cryptoportfolio(cls):
        """Parse the fetched JSON into liquidities and last_date.

        The feed's "weights" tables are column-oriented: "_row" holds the
        dates, every other key a currency with one weight per date.
        """
        def filter_weights(weight_hash):
            # Drop zero weights and the "_row" date column itself.
            if weight_hash[1][0] == 0:
                return False
            if weight_hash[0] == "_row":
                return False
            return True

        def clean_weights(i):
            # Column i = date index. Currencies suffixed with "s" are
            # treated as short positions (presumably a feed naming
            # convention — TODO confirm) and the suffix is stripped.
            def clean_weights_(h):
                if h[0].endswith("s"):
                    return [h[0][0:-1], (h[1][i], "short")]
                else:
                    return [h[0], (h[1][i], "long")]
            return clean_weights_

        def parse_weights(portfolio_hash):
            if "weights" not in portfolio_hash:
                return {}
            weights_hash = portfolio_hash["weights"]
            weights = {}
            for i in range(len(weights_hash["_row"])):
                date = datetime.datetime.strptime(weights_hash["_row"][i], "%Y-%m-%d")
                weights[date] = dict(filter(
                    filter_weights,
                    map(clean_weights(i), weights_hash.items())))
            return weights

        # portfolio_1 = high liquidity, portfolio_2 = medium (feed layout).
        high_liquidity = parse_weights(cls.data.get("portfolio_1"))
        medium_liquidity = parse_weights(cls.data.get("portfolio_2"))

        cls.liquidities.set({
            "medium": medium_liquidity,
            "high": high_liquidity,
            })
        # datetime(1, 1, 1) is the "no data" floor for the max() defaults.
        cls.last_date.set(max(
            max(medium_liquidity.keys(), default=datetime.datetime(1, 1, 1)),
            max(high_liquidity.keys(), default=datetime.datetime(1, 1, 1))
            ))
613 |