auto_trade_sys/trading_system/book_ticker_stream.py
薇薇安 f4feea6b87 feat(ticker_stream, book_ticker_stream): 优化内存管理与Redis写入逻辑
在 `ticker_24h_stream.py` 和 `book_ticker_stream.py` 中引入新的内存管理机制,限制进程内缓存的最大条数为 500,避免内存无限增长。更新 Redis 写入逻辑,确保在有 Redis 时优先写入 Redis,而不在进程内存中常驻数据。通过定期从 Redis 拉取数据并合并,提升了系统的内存使用效率与稳定性,同时优化了日志记录以减少高负载时的输出频率。此改动进一步增强了系统性能与资源管理能力。
2026-02-19 00:34:35 +08:00

239 lines
9.2 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

"""
最优挂单 WebSocket 流:订阅 !bookTicker维护全市场最优买/卖价缓存。
用于滑点估算、入场价格优化,提升交易执行效果。
支持多进程共用Leader 写 Redis所有进程通过 refresh_book_ticker_from_redis_loop 从 Redis 更新本地缓存。
文档:更新速度 5s推送所有交易对的最优挂单最高买单、最低卖单
"""
import asyncio
import json
import logging
import time
from typing import Dict, Optional, Any
logger = logging.getLogger(__name__)
try:
from .market_ws_leader import KEY_BOOK_TICKER
except ImportError:
KEY_BOOK_TICKER = "market:book_ticker"
# 最优挂单缓存symbol -> { bidPrice, bidQty, askPrice, askQty, time }
# 有 Redis 时由 refresh 循环从 Redis 回填,不在此无限累积
_book_ticker_cache: Dict[str, Dict[str, Any]] = {}
_book_ticker_updated_at: float = 0.0
_BOOK_TICKER_CACHE_MAX_KEYS = 500 # 进程内存最多保留 500 个
def get_book_ticker_cache() -> Dict[str, Dict[str, Any]]:
    """Return a shallow snapshot copy of the current best-quote cache."""
    return {sym: quote for sym, quote in _book_ticker_cache.items()}
def get_book_ticker(symbol: str) -> Optional[Dict[str, Any]]:
    """Look up the cached best bid/ask for *symbol*; None when not cached."""
    key = symbol.upper()
    return _book_ticker_cache.get(key)
def is_book_ticker_cache_fresh(max_age_sec: float = 10.0) -> bool:
    """True when the cache is non-empty and was updated within *max_age_sec* seconds."""
    if _book_ticker_cache:
        age = time.monotonic() - _book_ticker_updated_at
        return age <= max_age_sec
    return False
def estimate_slippage(symbol: str, side: str, quantity: float) -> Optional[float]:
    """
    Estimate execution slippage in USDT from the cached best bid/ask.

    The fill price is approximated by the touch (best ask for BUY, best bid
    for SELL) and compared against the mid price. When the order size exceeds
    the displayed top-of-book quantity, a flat 1.2x surcharge approximates
    walking deeper levels.

    Args:
        symbol: Trading pair, e.g. "BTCUSDT".
        side: "BUY" or "SELL" (now case-insensitive).
        quantity: Order quantity in base asset units.

    Returns:
        Estimated slippage in USDT; None when no usable quote is cached.
    """
    ticker = get_book_ticker(symbol)
    if not ticker:
        return None
    try:
        bid_price = float(ticker.get("bidPrice", 0))
        ask_price = float(ticker.get("askPrice", 0))
        bid_qty = float(ticker.get("bidQty", 0))
        ask_qty = float(ticker.get("askQty", 0))
        if bid_price <= 0 or ask_price <= 0:
            return None
        mid_price = (bid_price + ask_price) / 2
        # Fix: compare case-insensitively — previously any non-"BUY" value
        # (including lowercase "buy") silently fell into the SELL branch.
        if side.upper() == "BUY":
            half_spread = ask_price - mid_price
            top_depth = ask_qty
        else:  # SELL
            half_spread = mid_price - bid_price
            top_depth = bid_qty
        slippage = half_spread * quantity
        if top_depth < quantity:
            # Order eats beyond the top level; crude 20% surcharge.
            slippage *= 1.2
        return slippage
    except Exception:
        # Best-effort estimator: malformed quote fields mean "cannot estimate".
        return None
class BookTickerStream:
    """Subscribe to the futures !bookTicker stream and keep _book_ticker_cache
    current. When a Redis cache is supplied (leader process), quotes are
    mirrored into Redis for sibling processes to read instead of being kept
    in local memory."""

    def __init__(self, testnet: bool = False, redis_cache: Any = None):
        # testnet: connect to the Binance futures testnet endpoint.
        # redis_cache: optional async cache; when set, quotes go to Redis only.
        self.testnet = testnet
        self._redis_cache = redis_cache
        self._ws = None
        self._task: Optional[asyncio.Task] = None
        self._running = False

    def _ws_url(self) -> str:
        """Return the websocket endpoint for the configured environment."""
        if self.testnet:
            return "wss://stream.binancefuture.com/ws/!bookTicker"
        return "wss://fstream.binance.com/ws/!bookTicker"

    async def start(self) -> bool:
        """Start the background websocket task; idempotent."""
        if self._running:
            return True
        self._running = True
        self._task = asyncio.create_task(self._run_ws())
        logger.info("BookTickerStream: 已启动(!bookTicker用于滑点估算")
        return True

    async def stop(self):
        """Cancel the background task and close the websocket."""
        self._running = False
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
            self._task = None
        if self._ws:
            try:
                await self._ws.close()
            except Exception:
                pass
            self._ws = None
        logger.info("BookTickerStream: 已停止")

    async def _run_ws(self):
        """Connect/reconnect loop; dispatches each TEXT frame to _handle_message.

        Reconnects after 10s on any non-cancellation error; the stack trace is
        only attached when DEBUG logging is enabled to keep logs quiet.
        """
        import aiohttp
        while self._running:
            url = self._ws_url()
            try:
                async with aiohttp.ClientSession() as session:
                    async with session.ws_connect(
                        url, heartbeat=50, timeout=aiohttp.ClientTimeout(total=15)
                    ) as ws:
                        self._ws = ws
                        logger.info("BookTickerStream: WS 已连接")
                        async for msg in ws:
                            if not self._running:
                                break
                            if msg.type == aiohttp.WSMsgType.TEXT:
                                self._handle_message(msg.data)
                            elif msg.type in (aiohttp.WSMsgType.ERROR, aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSE):
                                break
            except asyncio.CancelledError:
                break
            except Exception as e:
                err_msg = getattr(e, "message", str(e)) or repr(e)
                err_type = type(e).__name__
                logger.warning(
                    "BookTickerStream: WS 异常 %s: %s10s 后重连",
                    err_type, err_msg,
                    exc_info=logger.isEnabledFor(logging.DEBUG),
                )
                await asyncio.sleep(10)
            self._ws = None
            if not self._running:
                break

    def _handle_message(self, raw: str):
        """Parse one bookTicker frame and store it (Redis first, else local cache)."""
        global _book_ticker_cache, _book_ticker_updated_at
        try:
            data = json.loads(raw)
        except Exception:
            return
        # Combined-stream frames wrap the payload under "data".
        if isinstance(data, dict) and "stream" in data:
            ticker_data = data.get("data", {})
        else:
            ticker_data = data
        if not isinstance(ticker_data, dict) or ticker_data.get("e") != "bookTicker":
            return
        s = (ticker_data.get("s") or "").strip().upper()
        if not s or not s.endswith("USDT"):
            return
        try:
            item = {
                "symbol": s,
                "bidPrice": float(ticker_data.get("b", 0)),
                "bidQty": float(ticker_data.get("B", 0)),
                "askPrice": float(ticker_data.get("a", 0)),
                "askQty": float(ticker_data.get("A", 0)),
                "time": int(ticker_data.get("T", 0)),
            }
        except (TypeError, ValueError):
            return
        # With Redis available, write only to Redis; the refresh loop backfills
        # the local cache, so nothing accumulates in this process.
        if self._redis_cache:
            try:
                # We are always inside the running WS task's loop here;
                # get_event_loop() is deprecated for this use since 3.10.
                asyncio.get_running_loop().create_task(
                    self._merge_and_write_book_ticker_to_redis(s, item)
                )
            except Exception as e:
                logger.debug("BookTickerStream: 写入 Redis 调度失败 %s", e)
            return
        _book_ticker_cache[s] = item
        _book_ticker_updated_at = time.monotonic()
        if len(_book_ticker_cache) > _BOOK_TICKER_CACHE_MAX_KEYS:
            # Evict the OLDEST-inserted surplus (dicts preserve insertion order).
            # The previous code trimmed keys[MAX:] — the tail — which deleted
            # the entry just written and froze the cache at its first 500 keys.
            excess = len(_book_ticker_cache) - _BOOK_TICKER_CACHE_MAX_KEYS
            for k in list(_book_ticker_cache.keys())[:excess]:
                del _book_ticker_cache[k]
        # Lazy %-args: formatting is skipped unless DEBUG is enabled.
        logger.debug(
            "BookTickerStream: 已更新 %s bid=%.4f ask=%.4f",
            s, item["bidPrice"], item["askPrice"],
        )

    async def _merge_and_write_book_ticker_to_redis(self, symbol: str, item: Dict[str, Any]) -> None:
        """Read-merge-write one quote into the shared Redis value; no local residency."""
        try:
            if not self._redis_cache:
                return
            existing = await self._redis_cache.get(KEY_BOOK_TICKER)
            merged = dict(existing) if isinstance(existing, dict) else {}
            merged[symbol] = item
            # 30s TTL so a dead leader cannot leave stale books behind.
            await self._redis_cache.set(KEY_BOOK_TICKER, merged, ttl=30)
        except Exception as e:
            logger.debug("BookTickerStream: 写入 Redis 失败 %s", e)
async def refresh_book_ticker_from_redis_loop(redis_cache: Any, interval_sec: float = 2.0) -> None:
    """Periodically pull the shared bookTicker map from Redis into the local
    cache (leader and followers both run this) and cap its size.

    Args:
        redis_cache: async cache exposing ``get``; the loop is a no-op when None.
        interval_sec: polling period in seconds.

    Runs until cancelled; per-iteration failures are logged at DEBUG and retried.
    """
    global _book_ticker_cache, _book_ticker_updated_at
    if redis_cache is None:
        return
    while True:
        try:
            await asyncio.sleep(interval_sec)
            data = await redis_cache.get(KEY_BOOK_TICKER)
            if data and isinstance(data, dict):
                _book_ticker_cache.update(data)
                if len(_book_ticker_cache) > _BOOK_TICKER_CACHE_MAX_KEYS:
                    # Drop the OLDEST-inserted surplus; the previous tail-trim
                    # (keys[MAX:]) discarded the freshest entries just merged.
                    excess = len(_book_ticker_cache) - _BOOK_TICKER_CACHE_MAX_KEYS
                    for k in list(_book_ticker_cache.keys())[:excess]:
                        del _book_ticker_cache[k]
                _book_ticker_updated_at = time.monotonic()
                logger.debug("BookTicker: 从 Redis 刷新 %s 个交易对", len(_book_ticker_cache))
        except asyncio.CancelledError:
            break
        except Exception as e:
            logger.debug("BookTicker: 从 Redis 刷新失败 %s", e)