import asyncio
import os  # FIX: os.getenv is used in __init__ but os was never imported (NameError)
import signal
import sys
import time
from asyncio import Semaphore
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, List, Optional

from loguru import logger

from config.settings import SYNC_CONFIG

from .account_sync import AccountSync
from .order_sync import OrderSync
from .position_sync import PositionSync


class SyncManager:
    """Sync manager (supports batched, concurrency-limited processing).

    Runs the enabled syncers (position / order / account, per SYNC_CONFIG)
    in a periodic asyncio loop, limiting how many syncers run at once via a
    semaphore, and keeps simple rolling statistics about each sync pass.
    """

    def __init__(self) -> None:
        self.is_running: bool = True
        self.sync_interval: float = SYNC_CONFIG['interval']
        # Maximum number of syncers allowed to run concurrently.
        self.max_concurrent: int = int(os.getenv('MAX_CONCURRENT', '10'))

        # Build the list of enabled syncers.
        self.syncers: list = []
        self.executor = ThreadPoolExecutor(max_workers=self.max_concurrent)
        # Caps concurrent syncer executions (see _run_syncer_with_limit).
        self.semaphore = Semaphore(self.max_concurrent)

        if SYNC_CONFIG['enable_position_sync']:
            self.syncers.append(PositionSync())
            logger.info("启用持仓同步")
        if SYNC_CONFIG['enable_order_sync']:
            self.syncers.append(OrderSync())
            logger.info("启用订单同步")
        if SYNC_CONFIG['enable_account_sync']:
            self.syncers.append(AccountSync())
            logger.info("启用账户信息同步")

        # Rolling performance statistics, updated each sync pass.
        self.stats: Dict[str, float] = {
            'total_accounts': 0,
            'success_count': 0,
            'error_count': 0,
            'last_sync_time': 0,
            'avg_sync_time': 0,
        }

        # Register shutdown handlers so Ctrl-C / SIGTERM stop the loop
        # gracefully. NOTE(review): signal.signal only works from the main
        # thread — assumed this manager is constructed there.
        signal.signal(signal.SIGINT, self.signal_handler)
        signal.signal(signal.SIGTERM, self.signal_handler)

    async def _run_syncer_with_limit(self, syncer):
        """Run a single syncer, bounded by the concurrency semaphore."""
        async with self.semaphore:
            return await self._run_syncer(syncer)

    def signal_handler(self, signum, frame) -> None:
        """Signal handler: request a graceful shutdown of the sync loop."""
        logger.info(f"接收到信号 {signum},正在关闭...")
        self.is_running = False

    def batch_process_accounts(self, accounts: Dict[str, Dict], batch_size: int = 100) -> None:
        """Process accounts in batches of *batch_size*.

        Sleeps briefly between batches to avoid hammering the database.
        NOTE(review): self._process_account_batch is not defined in this
        class — confirm it is provided by a subclass or mixin before
        calling this method.
        """
        account_items = list(accounts.items())
        for i in range(0, len(account_items), batch_size):
            batch = dict(account_items[i:i + batch_size])
            self._process_account_batch(batch)
            # Pause between batches to reduce database pressure.
            time.sleep(0.1)

    async def start(self) -> None:
        """Main loop: run all syncers every `sync_interval` seconds until stopped."""
        logger.info(f"同步服务启动,间隔 {self.sync_interval} 秒,最大并发 {self.max_concurrent}")
        while self.is_running:
            try:
                start_time = time.time()
                # FIX: dispatch through _run_syncer_with_limit so the
                # semaphore actually bounds concurrency — the original
                # called _run_syncer directly, leaving the limit unused.
                tasks = [self._run_syncer_with_limit(syncer) for syncer in self.syncers]
                results = await asyncio.gather(*tasks, return_exceptions=True)

                # Update statistics (exponential moving average for timing).
                sync_time = time.time() - start_time
                self.stats['last_sync_time'] = sync_time
                self.stats['avg_sync_time'] = (
                    self.stats['avg_sync_time'] * 0.9 + sync_time * 0.1
                )

                self._print_stats()
                logger.debug(f"同步完成,耗时 {sync_time:.2f} 秒,等待 {self.sync_interval} 秒")
                await asyncio.sleep(self.sync_interval)
            except asyncio.CancelledError:
                logger.info("同步任务被取消")
                break
            except Exception as e:
                logger.error(f"同步任务异常: {e}")
                self.stats['error_count'] += 1
                # Back off before retrying after an unexpected failure.
                await asyncio.sleep(30)

    async def _run_syncer(self, syncer) -> None:
        """Run one syncer over all accounts fetched from Redis.

        Errors are logged and counted rather than propagated, so one
        failing syncer does not abort the whole pass.
        """
        try:
            accounts = syncer.get_accounts_from_redis()
            self.stats['total_accounts'] = len(accounts)
            if not accounts:
                logger.warning("未获取到任何账号")
                return
            await syncer.sync_batch(accounts)
            self.stats['success_count'] += 1
        except Exception as e:
            logger.error(f"同步器 {syncer.__class__.__name__} 执行失败: {e}")
            self.stats['error_count'] += 1

    def _print_stats(self) -> None:
        """Log the current rolling statistics at INFO level."""
        stats_str = (
            f"统计: 账号数={self.stats['total_accounts']}, "
            f"成功={self.stats['success_count']}, "
            f"失败={self.stats['error_count']}, "
            f"本次耗时={self.stats['last_sync_time']:.2f}s, "
            f"平均耗时={self.stats['avg_sync_time']:.2f}s"
        )
        logger.info(stats_str)

    async def stop(self) -> None:
        """Stop the sync loop and shut down the thread pool."""
        self.is_running = False
        self.executor.shutdown(wait=True)
        logger.info("同步服务停止")