1
This commit is contained in:
174
utils/batch_account_sync.py
Normal file
174
utils/batch_account_sync.py
Normal file
@@ -0,0 +1,174 @@
|
||||
from typing import List, Dict, Any, Tuple
|
||||
from loguru import logger
|
||||
from sqlalchemy import text
|
||||
import time
|
||||
|
||||
class BatchAccountSync:
    """Batch synchronisation helper for account information.

    Merges a snapshot of account rows into ``deh_strategy_kx_new`` via a
    connection-scoped temporary table, keeping round trips to MySQL low.
    """

    def __init__(self, db_manager):
        # db_manager must expose get_session() returning a SQLAlchemy session.
        self.db_manager = db_manager

    def sync_accounts_batch(self, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Synchronise account rows in bulk (fastest variant).

        Args:
            all_account_data: dicts with keys st_id, k_id, balance,
                withdrawal, deposit, other, profit, time.

        Returns:
            (updated_count, inserted_count); (0, 0) on empty input or error.
        """
        if not all_account_data:
            return 0, 0

        session = self.db_manager.get_session()
        try:
            start_time = time.time()

            # Strategy 1: temp-table merge (best observed performance).
            updated_count, inserted_count = self._sync_using_temp_table(session, all_account_data)

            elapsed = time.time() - start_time
            logger.info(f"账户信息批量同步完成: 更新 {updated_count} 条,插入 {inserted_count} 条,耗时 {elapsed:.2f}秒")

            return updated_count, inserted_count

        except Exception as e:
            logger.error(f"账户信息批量同步失败: {e}")
            return 0, 0
        finally:
            session.close()

    @staticmethod
    def _account_row(data: Dict) -> Dict:
        """Map one raw account dict onto the SQL bind-parameter layout."""
        return {
            'st_id': data['st_id'],
            'k_id': data['k_id'],
            'asset': 'USDT',  # fixed asset, same as the original inline literal
            'balance': data['balance'],
            'withdrawal': data['withdrawal'],
            'deposit': data['deposit'],
            'other': data['other'],
            'profit': data['profit'],
            'time': data['time'],
        }

    def _sync_using_temp_table(self, session, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Merge account rows into the main table through a temporary table.

        Raises:
            Exception: re-raised after rollback so the caller can log/return.
        """
        try:
            # 1. Create the temporary table (visible only to this connection).
            session.execute(text("""
                CREATE TEMPORARY TABLE IF NOT EXISTS temp_account_info (
                    st_id INT,
                    k_id INT,
                    asset VARCHAR(32),
                    balance DECIMAL(20, 8),
                    withdrawal DECIMAL(20, 8),
                    deposit DECIMAL(20, 8),
                    other DECIMAL(20, 8),
                    profit DECIMAL(20, 8),
                    time INT,
                    PRIMARY KEY (k_id, st_id, time)
                )
            """))

            # 2. Empty it in case the pooled connection re-used an old instance.
            session.execute(text("TRUNCATE TABLE temp_account_info"))

            # 3. Load the snapshot in chunks. Bound parameters (executemany)
            #    replace the previous f-string VALUES construction, which was
            #    SQL-injectable and broke on unexpected value types.
            insert_sql = text("""
                INSERT INTO temp_account_info
                (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time)
                VALUES (:st_id, :k_id, :asset, :balance, :withdrawal, :deposit,
                        :other, :profit, :time)
            """)
            chunk_size = 1000
            for i in range(0, len(all_account_data), chunk_size):
                chunk = all_account_data[i:i + chunk_size]
                params = [self._account_row(data) for data in chunk]
                if params:
                    session.execute(insert_sql, params)

            # 4a. Update rows that already exist in the main table.
            update_result = session.execute(text("""
                UPDATE deh_strategy_kx_new main
                INNER JOIN temp_account_info temp
                    ON main.k_id = temp.k_id
                    AND main.st_id = temp.st_id
                    AND main.time = temp.time
                SET main.balance = temp.balance,
                    main.withdrawal = temp.withdrawal,
                    main.deposit = temp.deposit,
                    main.other = temp.other,
                    main.profit = temp.profit,
                    main.up_time = NOW()
            """))
            updated_count = update_result.rowcount

            # 4b. Insert rows that are new.
            insert_result = session.execute(text("""
                INSERT INTO deh_strategy_kx_new
                (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time, up_time)
                SELECT
                    st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time, NOW()
                FROM temp_account_info temp
                WHERE NOT EXISTS (
                    SELECT 1 FROM deh_strategy_kx_new main
                    WHERE main.k_id = temp.k_id
                        AND main.st_id = temp.st_id
                        AND main.time = temp.time
                )
            """))
            inserted_count = insert_result.rowcount

            # 5. Drop the staging table and commit everything at once.
            session.execute(text("DROP TEMPORARY TABLE IF EXISTS temp_account_info"))

            session.commit()

            return updated_count, inserted_count

        except Exception as e:
            session.rollback()
            logger.error(f"临时表同步失败: {e}")
            raise

    def _sync_using_on_duplicate(self, session, all_account_data: List[Dict]) -> Tuple[int, int]:
        """Simplified sync via INSERT ... ON DUPLICATE KEY UPDATE.

        Note: this statement cannot distinguish updates from inserts, so the
        second element of the returned tuple is always 0.
        """
        try:
            upsert_sql = text("""
                INSERT INTO deh_strategy_kx_new
                (st_id, k_id, asset, balance, withdrawal, deposit, other, profit, time)
                VALUES (:st_id, :k_id, :asset, :balance, :withdrawal, :deposit,
                        :other, :profit, :time)
                ON DUPLICATE KEY UPDATE
                    balance = VALUES(balance),
                    withdrawal = VALUES(withdrawal),
                    deposit = VALUES(deposit),
                    other = VALUES(other),
                    profit = VALUES(profit),
                    up_time = NOW()
            """)

            # Chunked execution keeps each statement's parameter set bounded.
            chunk_size = 1000
            total_processed = 0

            for i in range(0, len(all_account_data), chunk_size):
                chunk = all_account_data[i:i + chunk_size]
                params = [self._account_row(data) for data in chunk]
                if params:
                    session.execute(upsert_sql, params)
                    total_processed += len(params)

            session.commit()

            # Updated vs inserted counts are indistinguishable here.
            return total_processed, 0

        except Exception as e:
            session.rollback()
            logger.error(f"ON DUPLICATE同步失败: {e}")
            raise
|
||||
313
utils/batch_order_sync.py
Normal file
313
utils/batch_order_sync.py
Normal file
@@ -0,0 +1,313 @@
|
||||
from typing import List, Dict, Any, Tuple
|
||||
from loguru import logger
|
||||
from sqlalchemy import text
|
||||
import time
|
||||
|
||||
class BatchOrderSync:
    """Batch synchronisation helper for order rows (highest-throughput path)."""

    def __init__(self, db_manager, batch_size: int = 1000):
        # db_manager must expose get_session() returning a SQLAlchemy session.
        self.db_manager = db_manager
        self.batch_size = batch_size
        # Name of the connection-scoped staging table. BUGFIX: previously this
        # attribute was never set, so _batch_insert_to_temp_table1 raised
        # AttributeError on every call.
        self.temp_table_name = "temp_orders"

    def sync_orders_batch(self, all_orders: List[Dict]) -> Tuple[bool, int]:
        """Synchronise order rows in bulk.

        Returns:
            (success, processed_count); (True, 0) on empty input,
            (False, 0) on error.
        """
        if not all_orders:
            return True, 0

        session = self.db_manager.get_session()
        try:
            start_time = time.time()

            # Strategy 1: temp-table merge (best observed performance).
            processed_count = self._sync_using_temp_table(session, all_orders)

            elapsed = time.time() - start_time
            logger.info(f"订单批量同步完成: 处理 {processed_count} 条订单,耗时 {elapsed:.2f}秒")

            return True, processed_count

        except Exception as e:
            logger.error(f"订单批量同步失败: {e}")
            return False, 0
        finally:
            session.close()

    @staticmethod
    def _order_row(order: Dict) -> Dict:
        """Map one raw order dict onto the SQL bind-parameter layout.

        Raises KeyError if a mandatory field (st_id, k_id, side) is missing;
        callers catch and skip such rows.
        """
        return {
            'st_id': order['st_id'],
            'k_id': order['k_id'],
            'asset': order.get('asset', 'USDT'),
            'order_id': order.get('order_id') or '',
            'symbol': order.get('symbol') or '',
            'side': order['side'],
            'price': order.get('price'),
            'time': order.get('time'),
            'order_qty': order.get('order_qty'),
            'last_qty': order.get('last_qty'),
            'avg_price': order.get('avg_price'),
            'exchange_id': None,  # NULL, same as the previous literal
        }

    def _sync_using_temp_table(self, session, all_orders: List[Dict]) -> int:
        """Merge order rows into the main table through a temporary table."""
        try:
            # 1. Create the temporary table (visible only to this connection).
            session.execute(text("""
                CREATE TEMPORARY TABLE IF NOT EXISTS temp_orders (
                    st_id INT,
                    k_id INT,
                    asset VARCHAR(32),
                    order_id VARCHAR(765),
                    symbol VARCHAR(120),
                    side VARCHAR(120),
                    price FLOAT,
                    time INT,
                    order_qty FLOAT,
                    last_qty FLOAT,
                    avg_price FLOAT,
                    exchange_id INT,
                    UNIQUE KEY idx_unique_order (order_id, symbol, k_id, side)
                )
            """))

            # 2. Empty it in case the pooled connection re-used an old instance.
            session.execute(text("TRUNCATE TABLE temp_orders"))

            # 3. Bulk-load the staging table in chunks.
            inserted_count = self._batch_insert_to_temp_table(session, all_orders)

            if inserted_count == 0:
                session.execute(text("DROP TEMPORARY TABLE IF EXISTS temp_orders"))
                return 0

            # 4a. Update existing rows, but only when a tracked field changed
            #     (avoids pointless row writes).
            update_result = session.execute(text("""
                UPDATE deh_strategy_order_new main
                INNER JOIN temp_orders temp
                    ON main.order_id = temp.order_id
                    AND main.symbol = temp.symbol
                    AND main.k_id = temp.k_id
                    AND main.side = temp.side
                SET main.side = temp.side,
                    main.price = temp.price,
                    main.time = temp.time,
                    main.order_qty = temp.order_qty,
                    main.last_qty = temp.last_qty,
                    main.avg_price = temp.avg_price
                WHERE main.side != temp.side
                    OR main.price != temp.price
                    OR main.time != temp.time
                    OR main.order_qty != temp.order_qty
                    OR main.last_qty != temp.last_qty
                    OR main.avg_price != temp.avg_price
            """))
            updated_count = update_result.rowcount

            # 4b. Insert rows that are new.
            insert_result = session.execute(text("""
                INSERT INTO deh_strategy_order_new
                (st_id, k_id, asset, order_id, symbol, side, price, time,
                 order_qty, last_qty, avg_price, exchange_id)
                SELECT
                    st_id, k_id, asset, order_id, symbol, side, price, time,
                    order_qty, last_qty, avg_price, exchange_id
                FROM temp_orders temp
                WHERE NOT EXISTS (
                    SELECT 1 FROM deh_strategy_order_new main
                    WHERE main.order_id = temp.order_id
                        AND main.symbol = temp.symbol
                        AND main.k_id = temp.k_id
                        AND main.side = temp.side
                )
            """))
            inserted_count = insert_result.rowcount

            # 5. Drop the staging table and commit everything at once.
            session.execute(text("DROP TEMPORARY TABLE IF EXISTS temp_orders"))

            session.commit()

            total_processed = updated_count + inserted_count
            logger.info(f"订单批量同步: 更新 {updated_count} 条,插入 {inserted_count} 条")

            return total_processed

        except Exception as e:
            session.rollback()
            logger.error(f"临时表同步订单失败: {e}")
            raise

    def _batch_insert_to_temp_table(self, session, all_orders: List[Dict]) -> int:
        """Bulk-load orders into temp_orders using bound parameters.

        Bound parameters replace the previous f-string VALUES construction,
        which only escaped single quotes and was therefore SQL-injectable.
        Returns the number of rows actually inserted (malformed rows are
        skipped, not counted).
        """
        total_inserted = 0

        insert_sql = text("""
            INSERT INTO temp_orders
            (st_id, k_id, asset, order_id, symbol, side, price, time,
             order_qty, last_qty, avg_price, exchange_id)
            VALUES
            (:st_id, :k_id, :asset, :order_id, :symbol, :side, :price, :time,
             :order_qty, :last_qty, :avg_price, :exchange_id)
        """)

        try:
            for i in range(0, len(all_orders), self.batch_size):
                chunk = all_orders[i:i + self.batch_size]

                rows = []
                for order in chunk:
                    try:
                        rows.append(self._order_row(order))
                    except Exception as e:
                        logger.error(f"构建订单值失败: {order}, error={e}")
                        continue

                if rows:
                    session.execute(insert_sql, rows)
                    total_inserted += len(rows)

            return total_inserted

        except Exception as e:
            logger.error(f"批量插入临时表失败: {e}")
            raise

    def _batch_insert_to_temp_table1(self, session, all_orders: List[Dict]) -> int:
        """Alternative staging loader (parameterized, commits per chunk).

        Relies on self.temp_table_name, which is now set in __init__.
        """
        total_inserted = 0

        try:
            # Process the payload in chunks of batch_size.
            for i in range(0, len(all_orders), self.batch_size):
                chunk = all_orders[i:i + self.batch_size]

                # Build the bind-parameter rows, skipping malformed orders.
                insert_data = []
                for order in chunk:
                    try:
                        insert_data.append({
                            'st_id': order['st_id'],
                            'k_id': order['k_id'],
                            'asset': order.get('asset', 'USDT'),
                            'order_id': order['order_id'],
                            'symbol': order['symbol'],
                            'side': order['side'],
                            'price': order.get('price'),
                            'time': order.get('time'),
                            'order_qty': order.get('order_qty'),
                            'last_qty': order.get('last_qty'),
                            'avg_price': order.get('avg_price')
                            # exchange_id omitted -> column default (NULL)
                        })
                    except KeyError as e:
                        logger.error(f"订单数据缺少必要字段: {order}, missing={e}")
                        continue
                    except Exception as e:
                        logger.error(f"处理订单数据失败: {order}, error={e}")
                        continue

                if insert_data:
                    sql = text(f"""
                        INSERT INTO {self.temp_table_name}
                        (st_id, k_id, asset, order_id, symbol, side, price, time,
                         order_qty, last_qty, avg_price)
                        VALUES
                        (:st_id, :k_id, :asset, :order_id, :symbol, :side, :price, :time,
                         :order_qty, :last_qty, :avg_price)
                    """)

                    try:
                        session.execute(sql, insert_data)
                        session.commit()
                        total_inserted += len(insert_data)
                        logger.debug(f"插入 {len(insert_data)} 条数据到临时表")
                    except Exception as e:
                        session.rollback()
                        logger.error(f"执行批量插入失败: {e}")
                        raise

            logger.info(f"总共插入 {total_inserted} 条数据到临时表")
            return total_inserted

        except Exception as e:
            logger.error(f"批量插入临时表失败: {e}")
            session.rollback()
            raise

    def _sync_using_on_duplicate(self, session, all_orders: List[Dict]) -> int:
        """Simplified sync via INSERT ... ON DUPLICATE KEY UPDATE."""
        try:
            total_processed = 0

            upsert_sql = text("""
                INSERT INTO deh_strategy_order_new
                (st_id, k_id, asset, order_id, symbol, side, price, time,
                 order_qty, last_qty, avg_price, exchange_id)
                VALUES
                (:st_id, :k_id, :asset, :order_id, :symbol, :side, :price, :time,
                 :order_qty, :last_qty, :avg_price, :exchange_id)
                ON DUPLICATE KEY UPDATE
                    side = VALUES(side),
                    price = VALUES(price),
                    time = VALUES(time),
                    order_qty = VALUES(order_qty),
                    last_qty = VALUES(last_qty),
                    avg_price = VALUES(avg_price)
            """)

            # Chunked execution keeps each statement's parameter set bounded.
            for i in range(0, len(all_orders), self.batch_size):
                chunk = all_orders[i:i + self.batch_size]

                rows = []
                for order in chunk:
                    try:
                        rows.append(self._order_row(order))
                    except Exception as e:
                        logger.error(f"构建订单值失败: {order}, error={e}")
                        continue

                if rows:
                    session.execute(upsert_sql, rows)
                    total_processed += len(rows)

            session.commit()
            return total_processed

        except Exception as e:
            session.rollback()
            logger.error(f"ON DUPLICATE同步订单失败: {e}")
            raise
|
||||
254
utils/batch_position_sync.py
Normal file
254
utils/batch_position_sync.py
Normal file
@@ -0,0 +1,254 @@
|
||||
from typing import List, Dict, Any, Tuple
|
||||
from loguru import logger
|
||||
from sqlalchemy import text
|
||||
import time
|
||||
|
||||
class BatchPositionSync:
    """Batch synchronisation helper for position rows (temp-table based)."""

    def __init__(self, db_manager, batch_size: int = 500):
        # db_manager must expose get_session() returning a SQLAlchemy session.
        self.db_manager = db_manager
        self.batch_size = batch_size

    def sync_positions_batch(self, all_positions: List[Dict]) -> Tuple[bool, Dict]:
        """Synchronise positions for every account found in the payload.

        Returns:
            (success, stats) where stats has keys total/updated/inserted/deleted.
        """
        if not all_positions:
            return True, {'total': 0, 'updated': 0, 'inserted': 0, 'deleted': 0}

        session = self.db_manager.get_session()
        try:
            start_time = time.time()

            # Positions are reconciled per (k_id, st_id) account.
            positions_by_account = self._group_positions_by_account(all_positions)

            total_stats = {'total': 0, 'updated': 0, 'inserted': 0, 'deleted': 0}

            with session.begin():
                for (k_id, st_id), positions in positions_by_account.items():
                    success, stats = self._sync_account_using_temp_table(
                        session, k_id, st_id, positions
                    )

                    if success:
                        for key in ('total', 'updated', 'inserted', 'deleted'):
                            total_stats[key] += stats[key]

            elapsed = time.time() - start_time
            logger.info(f"持仓批量同步完成: 处理 {len(positions_by_account)} 个账号,"
                        f"总持仓 {total_stats['total']} 条,耗时 {elapsed:.2f}秒")

            return True, total_stats

        except Exception as e:
            logger.error(f"持仓批量同步失败: {e}")
            return False, {'total': 0, 'updated': 0, 'inserted': 0, 'deleted': 0}
        finally:
            session.close()

    def _group_positions_by_account(self, all_positions: List[Dict]) -> Dict[Tuple[int, int], List[Dict]]:
        """Group positions by (k_id, st_id); st_id defaults to 0 when absent."""
        groups: Dict[Tuple[int, int], List[Dict]] = {}
        for position in all_positions:
            key = (position.get('k_id'), position.get('st_id', 0))
            groups.setdefault(key, []).append(position)
        return groups

    def _sync_account_using_temp_table(self, session, k_id: int, st_id: int, positions: List[Dict]) -> Tuple[bool, Dict]:
        """Reconcile one account's positions against the main table.

        Uses bound parameters for k_id/st_id instead of the previous f-string
        interpolation (safer, and lets the server cache the statement).
        """
        account_params = {'k_id': k_id, 'st_id': st_id}
        try:
            # 1. Create the temporary table (visible only to this connection).
            session.execute(text("""
                CREATE TEMPORARY TABLE IF NOT EXISTS temp_positions (
                    st_id INT,
                    k_id INT,
                    asset VARCHAR(32),
                    symbol VARCHAR(50),
                    side VARCHAR(10),
                    price FLOAT,
                    `sum` FLOAT,
                    asset_num DECIMAL(20, 8),
                    asset_profit DECIMAL(20, 8),
                    leverage INT,
                    uptime INT,
                    profit_price DECIMAL(20, 8),
                    stop_price DECIMAL(20, 8),
                    liquidation_price DECIMAL(20, 8),
                    PRIMARY KEY (k_id, st_id, symbol, side)
                )
            """))

            # 2. Empty it in case the pooled connection re-used an old instance.
            session.execute(text("TRUNCATE TABLE temp_positions"))

            # 3. Stage this account's positions.
            self._batch_insert_to_temp_table(session, positions)

            # 4a. Update rows already present in the main table.
            update_result = session.execute(text("""
                UPDATE deh_strategy_position_new main
                INNER JOIN temp_positions temp
                    ON main.k_id = temp.k_id
                    AND main.st_id = temp.st_id
                    AND main.symbol = temp.symbol
                    AND main.side = temp.side
                SET main.price = temp.price,
                    main.`sum` = temp.`sum`,
                    main.asset_num = temp.asset_num,
                    main.asset_profit = temp.asset_profit,
                    main.leverage = temp.leverage,
                    main.uptime = temp.uptime,
                    main.profit_price = temp.profit_price,
                    main.stop_price = temp.stop_price,
                    main.liquidation_price = temp.liquidation_price
                WHERE main.k_id = :k_id AND main.st_id = :st_id
            """), account_params)
            updated_count = update_result.rowcount

            # 4b. Insert positions that are new for this account.
            insert_result = session.execute(text("""
                INSERT INTO deh_strategy_position_new
                (st_id, k_id, asset, symbol, side, price, `sum`,
                 asset_num, asset_profit, leverage, uptime,
                 profit_price, stop_price, liquidation_price)
                SELECT
                    st_id, k_id, asset, symbol, side, price, `sum`,
                    asset_num, asset_profit, leverage, uptime,
                    profit_price, stop_price, liquidation_price
                FROM temp_positions temp
                WHERE NOT EXISTS (
                    SELECT 1 FROM deh_strategy_position_new main
                    WHERE main.k_id = temp.k_id
                        AND main.st_id = temp.st_id
                        AND main.symbol = temp.symbol
                        AND main.side = temp.side
                )
                AND temp.k_id = :k_id AND temp.st_id = :st_id
            """), account_params)
            inserted_count = insert_result.rowcount

            # 5. Delete stale positions: in the main table for this account but
            #    absent from the incoming snapshot.
            delete_result = session.execute(text("""
                DELETE main
                FROM deh_strategy_position_new main
                LEFT JOIN temp_positions temp
                    ON main.k_id = temp.k_id
                    AND main.st_id = temp.st_id
                    AND main.symbol = temp.symbol
                    AND main.side = temp.side
                WHERE main.k_id = :k_id AND main.st_id = :st_id
                    AND temp.symbol IS NULL
            """), account_params)
            deleted_count = delete_result.rowcount

            # 6. Drop the staging table (recreated per account).
            session.execute(text("DROP TEMPORARY TABLE IF EXISTS temp_positions"))

            stats = {
                'total': len(positions),
                'updated': updated_count,
                'inserted': inserted_count,
                'deleted': deleted_count
            }

            logger.debug(f"账号({k_id},{st_id})持仓同步: 更新{updated_count} 插入{inserted_count} 删除{deleted_count}")

            return True, stats

        except Exception as e:
            logger.error(f"临时表同步账号({k_id},{st_id})持仓失败: {e}")
            return False, {'total': 0, 'updated': 0, 'inserted': 0, 'deleted': 0}

    def _batch_insert_to_temp_table(self, session, positions: List[Dict]):
        """Stage positions into temp_positions using bound parameters."""
        if not positions:
            return

        # Process the payload in chunks of batch_size.
        for i in range(0, len(positions), self.batch_size):
            chunk = positions[i:i + self.batch_size]

            # Build the bind-parameter rows, skipping unusable positions.
            insert_data = []
            for position in chunk:
                try:
                    data = self._convert_position_for_temp(position)
                    # symbol and side form part of the primary key; skip rows
                    # where either is empty.
                    if not all([data.get('symbol'), data.get('side')]):
                        continue

                    insert_data.append({
                        'st_id': data['st_id'],
                        'k_id': data['k_id'],
                        'asset': data.get('asset', 'USDT'),
                        'symbol': data['symbol'],
                        'side': data['side'],
                        'price': data.get('price'),
                        'sum_val': data.get('sum'),  # `sum` is reserved-ish; bind name differs
                        'asset_num': data.get('asset_num'),
                        'asset_profit': data.get('asset_profit'),
                        'leverage': data.get('leverage'),
                        'uptime': data.get('uptime'),
                        'profit_price': data.get('profit_price'),
                        'stop_price': data.get('stop_price'),
                        'liquidation_price': data.get('liquidation_price')
                    })

                except Exception as e:
                    logger.error(f"转换持仓数据失败: {position}, error={e}")
                    continue

            if insert_data:
                sql = """
                    INSERT INTO temp_positions
                    (st_id, k_id, asset, symbol, side, price, `sum`,
                     asset_num, asset_profit, leverage, uptime,
                     profit_price, stop_price, liquidation_price)
                    VALUES
                    (:st_id, :k_id, :asset, :symbol, :side, :price, :sum_val,
                     :asset_num, :asset_profit, :leverage, :uptime,
                     :profit_price, :stop_price, :liquidation_price)
                """

                session.execute(text(sql), insert_data)

    def _convert_position_for_temp(self, data: Dict) -> Dict:
        """Normalise one raw position dict for the staging table.

        Unparseable numerics become None (NULL) rather than raising.
        """
        def safe_float(value):
            try:
                return float(value) if value is not None else None
            except (TypeError, ValueError):
                return None

        def safe_int(value):
            try:
                return int(value) if value is not None else None
            except (TypeError, ValueError):
                return None

        return {
            'st_id': safe_int(data.get('st_id')) or 0,
            'k_id': safe_int(data.get('k_id')) or 0,
            'asset': data.get('asset', 'USDT'),
            'symbol': str(data.get('symbol', '')),
            'side': str(data.get('side', '')),
            'price': safe_float(data.get('price')),
            'sum': safe_float(data.get('qty')),  # source field is 'qty'
            'asset_num': safe_float(data.get('asset_num')),
            'asset_profit': safe_float(data.get('asset_profit')),
            'leverage': safe_int(data.get('leverage')),
            'uptime': safe_int(data.get('uptime')),
            'profit_price': safe_float(data.get('profit_price')),
            'stop_price': safe_float(data.get('stop_price')),
            'liquidation_price': safe_float(data.get('liquidation_price'))
        }
|
||||
129
utils/redis_batch_helper.py
Normal file
129
utils/redis_batch_helper.py
Normal file
@@ -0,0 +1,129 @@
|
||||
import redis
|
||||
from loguru import logger
|
||||
from typing import List, Dict, Tuple
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
class RedisBatchHelper:
    """Bulk reader for per-account order data stored in Redis hashes."""

    def __init__(self, redis_client):
        # Any redis client exposing hkeys()/hmget().
        self.redis_client = redis_client

    def get_recent_orders_batch(self, exchange_id: str, account_list: List[Tuple[int, int]],
                                recent_days: int = 3) -> List[Dict]:
        """Fetch recent orders for every account, in small groups to cap memory."""
        all_orders = []

        try:
            batch_size = 20  # accounts handled per round
            total = len(account_list)

            for start in range(0, total, batch_size):
                group = account_list[start:start + batch_size]
                all_orders.extend(
                    self._get_batch_accounts_orders(exchange_id, group, recent_days)
                )

                # Short pause between rounds so Redis isn't hammered.
                if start + batch_size < total:
                    time.sleep(0.05)

            logger.info(f"批量获取订单完成: {len(account_list)}个账号,{len(all_orders)}条订单")

        except Exception as e:
            logger.error(f"批量获取订单失败: {e}")

        return all_orders

    def _get_batch_accounts_orders(self, exchange_id: str, account_list: List[Tuple[int, int]],
                                   recent_days: int) -> List[Dict]:
        """Collect order payloads for one group of accounts."""
        batch_orders = []

        try:
            # The set of YYYY-MM-DD strings considered "recent".
            now = datetime.now()
            recent_dates = [(now - timedelta(days=offset)).strftime('%Y-%m-%d')
                            for offset in range(recent_days)]

            all_keys = []
            key_to_account = {}

            # Scan each account's hash for fields named "<recent-date>_...".
            for k_id, st_id in account_list:
                redis_key = f"{exchange_id}:orders:{k_id}"

                try:
                    for key in self.redis_client.hkeys(redis_key):
                        key_str = key.decode('utf-8') if isinstance(key, bytes) else key

                        # The 'positions' field is not an order entry.
                        if key_str == 'positions':
                            continue

                        if any(key_str.startswith(d + '_') for d in recent_dates):
                            all_keys.append((redis_key, key_str))
                            key_to_account[(redis_key, key_str)] = (k_id, st_id)

                except Exception as e:
                    logger.error(f"获取账号 {k_id} 的key失败: {e}")
                    continue

            if not all_keys:
                return batch_orders

            # Pull the payloads in slices; within a slice, group fields by
            # hash name so each hash costs a single HMGET.
            chunk_size = 500
            for start in range(0, len(all_keys), chunk_size):
                keys_by_redis_key = {}
                for redis_key, key_str in all_keys[start:start + chunk_size]:
                    keys_by_redis_key.setdefault(redis_key, []).append(key_str)

                for redis_key, key_list in keys_by_redis_key.items():
                    try:
                        values = self.redis_client.hmget(redis_key, key_list)

                        for key_str, order_json in zip(key_list, values):
                            if not order_json:
                                continue

                            try:
                                order = json.loads(order_json)
                            except json.JSONDecodeError as e:
                                logger.debug(f"解析订单JSON失败: key={key_str}, error={e}")
                                continue

                            # Double-check the timestamp is inside the window,
                            # then tag the order with its owning account.
                            if order.get('time', 0) >= int(time.time()) - recent_days * 24 * 3600:
                                k_id, st_id = key_to_account.get((redis_key, key_str), (0, 0))
                                order['k_id'] = k_id
                                order['st_id'] = st_id
                                order['exchange_id'] = exchange_id
                                batch_orders.append(order)

                    except Exception as e:
                        logger.error(f"批量获取Redis数据失败: {redis_key}, error={e}")
                        continue

        except Exception as e:
            logger.error(f"获取批量账号订单失败: {e}")

        return batch_orders
|
||||
Reference in New Issue
Block a user