# mysql_pool.py
  1. # -*- coding: utf-8 -*-
  2. # Author : Charley
  3. # Python : 3.10.8
  4. # Date : 2025/3/25 14:14
  5. import re
  6. import pymysql
  7. import YamlLoader
  8. from loguru import logger
  9. from dbutils.pooled_db import PooledDB
  10. # 获取yaml配置
  11. yaml = YamlLoader.readYaml()
  12. mysqlYaml = yaml.get("mysql")
  13. sql_host = mysqlYaml.getValueAsString("host")
  14. sql_port = mysqlYaml.getValueAsInt("port")
  15. sql_user = mysqlYaml.getValueAsString("username")
  16. sql_password = mysqlYaml.getValueAsString("password")
  17. sql_db = mysqlYaml.getValueAsString("db")
  18. class MySQLConnectionPool:
  19. """
  20. MySQL连接池
  21. """
  22. def __init__(self, mincached=1, maxcached=2, maxconnections=3, log=None):
  23. """
  24. 初始化连接池
  25. :param mincached: 初始化时,链接池中至少创建的链接,0表示不创建
  26. :param maxcached: 池中空闲连接的最大数目(0 或 None 表示池大小不受限制)
  27. :param maxconnections: 允许的最大连接数(0 或 None 表示任意数量的连接)
  28. :param log: 自定义日志记录器
  29. """
  30. # 使用 loguru 的 logger,如果传入了其他 logger,则使用传入的 logger
  31. self.log = log or logger
  32. self.pool = PooledDB(
  33. creator=pymysql,
  34. mincached=mincached,
  35. maxcached=maxcached,
  36. maxconnections=maxconnections,
  37. blocking=True, # 连接池中如果没有可用连接后,是否阻塞等待。True,等待;False,不等待然后报错
  38. host=sql_host,
  39. port=sql_port,
  40. user=sql_user,
  41. password=sql_password,
  42. database=sql_db,
  43. charset="utf8mb4",
  44. use_unicode=True,
  45. init_command="SET NAMES utf8mb4",
  46. ping=1, # 0:完全关闭(更快), 1:仅在取连接时检查, 2:每次执行前检查连接有效性,防止使用已断开的连接
  47. connect_timeout=5, # 连接超时时间(秒)
  48. # read_timeout=30, # 读取超时时间(秒)
  49. write_timeout=30 # 写入超时时间(秒)
  50. )
  51. def _execute(self, query, args=None, commit=False):
  52. """
  53. 执行SQL
  54. :param query: SQL语句
  55. :param args: SQL参数
  56. :param commit: 是否提交事务
  57. :return: 查询结果
  58. """
  59. try:
  60. with self.pool.connection() as conn:
  61. with conn.cursor() as cursor:
  62. cursor.execute(query, args)
  63. if commit:
  64. conn.commit()
  65. self.log.debug(f"sql _execute, Query: {query}, Rows: {cursor.rowcount}")
  66. return cursor
  67. except Exception as e:
  68. if commit and conn:
  69. conn.rollback()
  70. self.log.exception(f"Error executing query: {e}, Query: {query}, Args: {args}")
  71. raise e
  72. def select_one(self, query, args=None):
  73. """
  74. 执行查询,返回单个结果
  75. :param query: 查询语句
  76. :param args: 查询参数
  77. :return: 查询结果
  78. """
  79. cursor = self._execute(query, args)
  80. return cursor.fetchone()
  81. def select_all(self, query, args=None):
  82. """
  83. 执行查询,返回所有结果
  84. :param query: 查询语句
  85. :param args: 查询参数
  86. :return: 查询结果
  87. """
  88. cursor = self._execute(query, args)
  89. return cursor.fetchall()
  90. def insert_one(self, query, args):
  91. """
  92. 执行单条插入语句
  93. :param query: 插入语句
  94. :param args: 插入参数
  95. """
  96. self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
  97. cursor = self._execute(query, args, commit=True)
  98. return cursor.lastrowid # 返回插入的ID
  99. def insert_all(self, query, args_list):
  100. """
  101. 执行批量插入语句,如果失败则逐条插入
  102. :param query: 插入语句
  103. :param args_list: 插入参数列表
  104. """
  105. conn = None
  106. cursor = None
  107. try:
  108. conn = self.pool.connection()
  109. cursor = conn.cursor()
  110. cursor.executemany(query, args_list)
  111. conn.commit()
  112. self.log.debug(f"sql insert_all, SQL: {query[:100]}..., Rows: {cursor.rowcount}")
  113. self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_all 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
  114. except pymysql.err.IntegrityError as e:
  115. if "Duplicate entry" in str(e):
  116. conn.rollback()
  117. self.log.warning(f"批量插入遇到重复,开始逐条插入。错误: {e}")
  118. rowcount = 0
  119. for args in args_list:
  120. try:
  121. self.insert_one(query, args)
  122. rowcount += 1
  123. except pymysql.err.IntegrityError as e2:
  124. if "Duplicate entry" in str(e2):
  125. self.log.debug(f"跳过重复条目: {e2}")
  126. else:
  127. self.log.error(f"插入失败: {e2}")
  128. except Exception as e2:
  129. self.log.error(f"插入失败: {e2}")
  130. self.log.info(f"逐条插入完成: {rowcount}/{len(args_list)}条")
  131. else:
  132. conn.rollback()
  133. self.log.exception(f"数据库完整性错误: {e}")
  134. raise e
  135. except Exception as e:
  136. conn.rollback()
  137. self.log.exception(f"批量插入失败: {e}")
  138. raise e
  139. finally:
  140. if cursor:
  141. cursor.close()
  142. if conn:
  143. conn.close()
  144. def insert_one_or_dict(self, table=None, data=None, query=None, args=None, commit=True, ignore=False):
  145. """
  146. 单条插入(支持字典或原始SQL)
  147. :param table: 表名(字典插入时必需)
  148. :param data: 字典数据 {列名: 值}
  149. :param query: 直接SQL语句(与data二选一)
  150. :param args: SQL参数(query使用时必需)
  151. :param commit: 是否自动提交
  152. :param ignore: 是否使用ignore
  153. :return: 最后插入ID
  154. """
  155. if data is not None:
  156. if not isinstance(data, dict):
  157. raise ValueError("Data must be a dictionary")
  158. keys = ', '.join([self._safe_identifier(k) for k in data.keys()])
  159. values = ', '.join(['%s'] * len(data))
  160. # 构建 INSERT IGNORE 语句
  161. ignore_clause = "IGNORE" if ignore else ""
  162. query = f"INSERT {ignore_clause} INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
  163. args = tuple(data.values())
  164. elif query is None:
  165. raise ValueError("Either data or query must be provided")
  166. try:
  167. cursor = self._execute(query, args, commit)
  168. self.log.info(f"sql insert_one_or_dict, Table: {table}, Rows: {cursor.rowcount}")
  169. self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one_or_dict 入库中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
  170. return cursor.lastrowid
  171. except pymysql.err.IntegrityError as e:
  172. if "Duplicate entry" in str(e):
  173. self.log.warning(f"插入失败:重复条目,已跳过。错误详情: {e}")
  174. return -1 # 返回 -1 表示重复条目被跳过
  175. else:
  176. self.log.exception(f"数据库完整性错误: {e}")
  177. raise
  178. except Exception as e:
  179. self.log.exception(f"未知错误: {e}")
  180. raise
  181. def insert_many(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True,
  182. ignore=False):
  183. """
  184. 批量插入(支持字典列表或原始SQL)
  185. :param table: 表名(字典插入时必需)
  186. :param data_list: 字典列表 [{列名: 值}]
  187. :param query: 直接SQL语句(与data_list二选一)
  188. :param args_list: SQL参数列表(query使用时必需)
  189. :param batch_size: 分批大小
  190. :param commit: 是否自动提交
  191. :param ignore: 是否使用ignore
  192. :return: 影响行数
  193. """
  194. if data_list is not None:
  195. if not data_list or not isinstance(data_list[0], dict):
  196. raise ValueError("Data_list must be a non-empty list of dictionaries")
  197. keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
  198. values = ', '.join(['%s'] * len(data_list[0]))
  199. # 构建 INSERT IGNORE 语句
  200. ignore_clause = "IGNORE" if ignore else ""
  201. query = f"INSERT {ignore_clause} INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
  202. args_list = [tuple(d.values()) for d in data_list]
  203. elif query is None:
  204. raise ValueError("Either data_list or query must be provided")
  205. total = 0
  206. for i in range(0, len(args_list), batch_size):
  207. batch = args_list[i:i + batch_size]
  208. try:
  209. with self.pool.connection() as conn:
  210. with conn.cursor() as cursor:
  211. cursor.executemany(query, batch)
  212. if commit:
  213. conn.commit()
  214. total += cursor.rowcount
  215. except pymysql.err.IntegrityError as e:
  216. # 处理唯一索引冲突
  217. if "Duplicate entry" in str(e):
  218. if ignore:
  219. # 如果使用了 INSERT IGNORE,理论上不会进这里,但以防万一
  220. self.log.warning(f"批量插入遇到重复条目(ignore模式): {e}")
  221. else:
  222. # 没有使用 IGNORE,降级为逐条插入
  223. self.log.warning(f"批量插入遇到重复条目,开始逐条插入。错误: {e}")
  224. if commit:
  225. conn.rollback()
  226. rowcount = 0
  227. for j, args in enumerate(batch):
  228. try:
  229. if data_list:
  230. # 字典模式
  231. self.insert_one_or_dict(
  232. table=table,
  233. data=dict(zip(data_list[0].keys(), args)),
  234. commit=commit,
  235. ignore=False # 单条插入时手动捕获重复
  236. )
  237. else:
  238. # 原始SQL模式
  239. self.insert_one(query, args)
  240. rowcount += 1
  241. except pymysql.err.IntegrityError as e2:
  242. if "Duplicate entry" in str(e2):
  243. self.log.debug(f"跳过重复条目[{i+j+1}]: {e2}")
  244. else:
  245. self.log.error(f"插入失败[{i+j+1}]: {e2}")
  246. except Exception as e2:
  247. self.log.error(f"插入失败[{i+j+1}]: {e2}")
  248. total += rowcount
  249. self.log.info(f"批次逐条插入完成: 成功{rowcount}/{len(batch)}条")
  250. else:
  251. # 其他完整性错误
  252. self.log.exception(f"数据库完整性错误: {e}")
  253. if commit:
  254. conn.rollback()
  255. raise e
  256. except Exception as e:
  257. # 其他数据库错误
  258. self.log.exception(f"批量插入失败: {e}")
  259. if commit:
  260. conn.rollback()
  261. raise e
  262. if table:
  263. self.log.info(f"sql insert_many, Table: {table}, Total Rows: {total}")
  264. else:
  265. self.log.info(f"sql insert_many, Query: {query}, Total Rows: {total}")
  266. return total
  267. def insert_many_two(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True,
  268. ignore=False):
  269. """
  270. 批量插入(支持字典列表或原始SQL) - 备用方法
  271. :param table: 表名(字典插入时必需)
  272. :param data_list: 字典列表 [{列名: 值}]
  273. :param query: 直接SQL语句(与data_list二选一)
  274. :param args_list: SQL参数列表(query使用时必需)
  275. :param batch_size: 分批大小
  276. :param commit: 是否自动提交
  277. :param ignore: 是否使用INSERT IGNORE
  278. :return: 影响行数
  279. """
  280. if data_list is not None:
  281. if not data_list or not isinstance(data_list[0], dict):
  282. raise ValueError("Data_list must be a non-empty list of dictionaries")
  283. keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
  284. values = ', '.join(['%s'] * len(data_list[0]))
  285. ignore_clause = "IGNORE" if ignore else ""
  286. query = f"INSERT {ignore_clause} INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
  287. args_list = [tuple(d.values()) for d in data_list]
  288. elif query is None:
  289. raise ValueError("Either data_list or query must be provided")
  290. total = 0
  291. for i in range(0, len(args_list), batch_size):
  292. batch = args_list[i:i + batch_size]
  293. try:
  294. with self.pool.connection() as conn:
  295. with conn.cursor() as cursor:
  296. cursor.executemany(query, batch)
  297. if commit:
  298. conn.commit()
  299. total += cursor.rowcount
  300. except pymysql.err.IntegrityError as e:
  301. if "Duplicate entry" in str(e) and not ignore:
  302. self.log.warning(f"批量插入遇到重复,降级为逐条插入: {e}")
  303. if commit:
  304. conn.rollback()
  305. rowcount = 0
  306. for args in batch:
  307. try:
  308. self.insert_one(query, args)
  309. rowcount += 1
  310. except pymysql.err.IntegrityError as e2:
  311. if "Duplicate entry" in str(e2):
  312. self.log.debug(f"跳过重复条目: {e2}")
  313. else:
  314. self.log.error(f"插入失败: {e2}")
  315. except Exception as e2:
  316. self.log.error(f"插入失败: {e2}")
  317. total += rowcount
  318. else:
  319. self.log.exception(f"数据库完整性错误: {e}")
  320. if commit:
  321. conn.rollback()
  322. raise e
  323. except Exception as e:
  324. self.log.exception(f"批量插入失败: {e}")
  325. if commit:
  326. conn.rollback()
  327. raise e
  328. self.log.info(f"sql insert_many_two, Table: {table}, Total Rows: {total}")
  329. return total
  330. def insert_too_many(self, query, args_list, batch_size=1000):
  331. """
  332. 执行批量插入语句,分片提交, 单次插入大于十万+时可用, 如果失败则降级为逐条插入
  333. :param query: 插入语句
  334. :param args_list: 插入参数列表
  335. :param batch_size: 每次插入的条数
  336. """
  337. self.log.info(f"sql insert_too_many, Query: {query}, Total Rows: {len(args_list)}")
  338. for i in range(0, len(args_list), batch_size):
  339. batch = args_list[i:i + batch_size]
  340. try:
  341. with self.pool.connection() as conn:
  342. with conn.cursor() as cursor:
  343. cursor.executemany(query, batch)
  344. conn.commit()
  345. self.log.debug(f"insert_too_many -> Total Rows: {len(batch)}")
  346. except Exception as e:
  347. self.log.error(f"insert_too_many error. Trying single insert. Error: {e}")
  348. # 当前批次降级为单条插入
  349. for args in batch:
  350. self.insert_one(query, args)
  351. def update_one(self, query, args):
  352. """
  353. 执行单条更新语句
  354. :param query: 更新语句
  355. :param args: 更新参数
  356. """
  357. self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_one 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
  358. return self._execute(query, args, commit=True)
  359. def update_all(self, query, args_list):
  360. """
  361. 执行批量更新语句,如果失败则逐条更新
  362. :param query: 更新语句
  363. :param args_list: 更新参数列表
  364. """
  365. conn = None
  366. cursor = None
  367. try:
  368. conn = self.pool.connection()
  369. cursor = conn.cursor()
  370. cursor.executemany(query, args_list)
  371. conn.commit()
  372. self.log.debug(f"sql update_all, SQL: {query}, Rows: {len(args_list)}")
  373. self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_all 更新中>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
  374. except Exception as e:
  375. conn.rollback()
  376. self.log.error(f"Error executing query: {e}")
  377. # 如果批量更新失败,则逐条更新
  378. rowcount = 0
  379. for args in args_list:
  380. self.update_one(query, args)
  381. rowcount += 1
  382. self.log.debug(f'Batch update failed. Updated {rowcount} rows individually.')
  383. finally:
  384. if cursor:
  385. cursor.close()
  386. if conn:
  387. conn.close()
  388. def update_one_or_dict(self, table=None, data=None, condition=None, query=None, args=None, commit=True):
  389. """
  390. 单条更新(支持字典或原始SQL)
  391. :param table: 表名(字典模式必需)
  392. :param data: 字典数据 {列名: 值}(与 query 二选一)
  393. :param condition: 更新条件,支持以下格式:
  394. - 字典: {"id": 1} → "WHERE id = %s"
  395. - 字符串: "id = 1" → "WHERE id = 1"(需自行确保安全)
  396. - 元组: ("id = %s", [1]) → "WHERE id = %s"(参数化查询)
  397. :param query: 直接SQL语句(与 data 二选一)
  398. :param args: SQL参数(query 模式下必需)
  399. :param commit: 是否自动提交
  400. :return: 影响行数
  401. :raises: ValueError 参数校验失败时抛出
  402. """
  403. # 参数校验
  404. if data is not None:
  405. if not isinstance(data, dict):
  406. raise ValueError("Data must be a dictionary")
  407. if table is None:
  408. raise ValueError("Table name is required for dictionary update")
  409. if condition is None:
  410. raise ValueError("Condition is required for dictionary update")
  411. # 构建 SET 子句
  412. set_clause = ", ".join([f"{self._safe_identifier(k)} = %s" for k in data.keys()])
  413. set_values = list(data.values())
  414. # 解析条件
  415. condition_clause, condition_args = self._parse_condition(condition)
  416. query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
  417. args = set_values + condition_args
  418. elif query is None:
  419. raise ValueError("Either data or query must be provided")
  420. # 执行更新
  421. cursor = self._execute(query, args, commit)
  422. # self.log.debug(
  423. # f"Updated table={table}, rows={cursor.rowcount}, query={query[:100]}...",
  424. # extra={"table": table, "rows": cursor.rowcount}
  425. # )
  426. return cursor.rowcount
  427. def _parse_condition(self, condition):
  428. """
  429. 解析条件为 (clause, args) 格式
  430. :param condition: 字典/字符串/元组
  431. :return: (str, list) SQL 子句和参数列表
  432. """
  433. if isinstance(condition, dict):
  434. clause = " AND ".join([f"{self._safe_identifier(k)} = %s" for k in condition.keys()])
  435. args = list(condition.values())
  436. elif isinstance(condition, str):
  437. clause = condition # 注意:需调用方确保安全
  438. args = []
  439. elif isinstance(condition, (tuple, list)) and len(condition) == 2:
  440. clause, args = condition[0], condition[1]
  441. if not isinstance(args, (list, tuple)):
  442. args = [args]
  443. else:
  444. raise ValueError("Condition must be dict/str/(clause, args)")
  445. return clause, args
  446. def update_many(self, table=None, data_list=None, condition_list=None, query=None, args_list=None, batch_size=500,
  447. commit=True):
  448. """
  449. 批量更新(支持字典列表或原始SQL)
  450. :param table: 表名(字典插入时必需)
  451. :param data_list: 字典列表 [{列名: 值}]
  452. :param condition_list: 条件列表(必须为字典,与data_list等长)
  453. :param query: 直接SQL语句(与data_list二选一)
  454. :param args_list: SQL参数列表(query使用时必需)
  455. :param batch_size: 分批大小
  456. :param commit: 是否自动提交
  457. :return: 影响行数
  458. """
  459. if data_list is not None:
  460. if not data_list or not isinstance(data_list[0], dict):
  461. raise ValueError("Data_list must be a non-empty list of dictionaries")
  462. if condition_list is None or len(data_list) != len(condition_list):
  463. raise ValueError("Condition_list must be provided and match the length of data_list")
  464. if not all(isinstance(cond, dict) for cond in condition_list):
  465. raise ValueError("All elements in condition_list must be dictionaries")
  466. # 获取第一个数据项和条件项的键
  467. first_data_keys = set(data_list[0].keys())
  468. first_cond_keys = set(condition_list[0].keys())
  469. # 构造基础SQL
  470. set_clause = ', '.join([self._safe_identifier(k) + ' = %s' for k in data_list[0].keys()])
  471. condition_clause = ' AND '.join([self._safe_identifier(k) + ' = %s' for k in condition_list[0].keys()])
  472. base_query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
  473. total = 0
  474. # 分批次处理
  475. for i in range(0, len(data_list), batch_size):
  476. batch_data = data_list[i:i + batch_size]
  477. batch_conds = condition_list[i:i + batch_size]
  478. batch_args = []
  479. # 检查当前批次的结构是否一致
  480. can_batch = True
  481. for data, cond in zip(batch_data, batch_conds):
  482. data_keys = set(data.keys())
  483. cond_keys = set(cond.keys())
  484. if data_keys != first_data_keys or cond_keys != first_cond_keys:
  485. can_batch = False
  486. break
  487. batch_args.append(tuple(data.values()) + tuple(cond.values()))
  488. if not can_batch:
  489. # 结构不一致,转为单条更新
  490. for data, cond in zip(batch_data, batch_conds):
  491. self.update_one_or_dict(table=table, data=data, condition=cond, commit=commit)
  492. total += 1
  493. continue
  494. # 执行批量更新
  495. try:
  496. with self.pool.connection() as conn:
  497. with conn.cursor() as cursor:
  498. cursor.executemany(base_query, batch_args)
  499. if commit:
  500. conn.commit()
  501. total += cursor.rowcount
  502. self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
  503. except Exception as e:
  504. if commit:
  505. conn.rollback()
  506. self.log.error(f"Batch update failed: {e}")
  507. # 降级为单条更新
  508. for args, data, cond in zip(batch_args, batch_data, batch_conds):
  509. try:
  510. self._execute(base_query, args, commit=commit)
  511. total += 1
  512. except Exception as e2:
  513. self.log.error(f"Single update failed: {e2}, Data: {data}, Condition: {cond}")
  514. self.log.info(f"Total updated rows: {total}")
  515. return total
  516. elif query is not None:
  517. # 处理原始SQL和参数列表
  518. if args_list is None:
  519. raise ValueError("args_list must be provided when using query")
  520. total = 0
  521. for i in range(0, len(args_list), batch_size):
  522. batch_args = args_list[i:i + batch_size]
  523. try:
  524. with self.pool.connection() as conn:
  525. with conn.cursor() as cursor:
  526. cursor.executemany(query, batch_args)
  527. if commit:
  528. conn.commit()
  529. total += cursor.rowcount
  530. self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
  531. except Exception as e:
  532. if commit:
  533. conn.rollback()
  534. self.log.error(f"Batch update failed: {e}")
  535. # 降级为单条更新
  536. for args in batch_args:
  537. try:
  538. self._execute(query, args, commit=commit)
  539. total += 1
  540. except Exception as e2:
  541. self.log.error(f"Single update failed: {e2}, Args: {args}")
  542. self.log.info(f"Total updated rows: {total}")
  543. return total
  544. else:
  545. raise ValueError("Either data_list or query must be provided")
  546. def check_pool_health(self):
  547. """
  548. 检查连接池中有效连接数
  549. # 使用示例
  550. # 配置 MySQL 连接池
  551. sql_pool = MySQLConnectionPool(log=log)
  552. if not sql_pool.check_pool_health():
  553. log.error("数据库连接池异常")
  554. raise RuntimeError("数据库连接池异常")
  555. """
  556. try:
  557. with self.pool.connection() as conn:
  558. conn.ping(reconnect=True)
  559. return True
  560. except Exception as e:
  561. self.log.error(f"Connection pool health check failed: {e}")
  562. return False
  563. def close(self):
  564. """
  565. 关闭连接池,释放所有连接
  566. """
  567. try:
  568. if hasattr(self, 'pool') and self.pool:
  569. self.pool.close()
  570. self.log.info("数据库连接池已关闭")
  571. except Exception as e:
  572. self.log.error(f"关闭连接池失败: {e}")
  573. @staticmethod
  574. def _safe_identifier(name):
  575. """SQL标识符安全校验"""
  576. if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):
  577. raise ValueError(f"Invalid SQL identifier: {name}")
  578. return name
  579. if __name__ == '__main__':
  580. sql_pool = MySQLConnectionPool()
  581. # data_dic = {'card_type_id': 111, 'card_type_name': '补充包 继承的意志【OPC-13】', 'card_type_position': 964,
  582. # 'card_id': 5284, 'card_name': '蒙奇·D·路飞', 'card_number': 'OP13-001', 'card_rarity': 'L',
  583. # 'card_img': 'https://source.windoent.com/OnePiecePc/Picture/1757929283612OP13-001.png',
  584. # 'card_life': '4', 'card_attribute': '打', 'card_power': '5000', 'card_attack': '-',
  585. # 'card_color': '红/绿', 'subscript': 4, 'card_features': '超新星/草帽一伙',
  586. # 'card_text_desc': '【咚!!×1】【对方的攻击时】我方处于活跃状态的咚!!不多于5张的场合,可以将我方任意张数的咚!!转为休息状态。每有1张转为休息状态的咚!!,本次战斗中,此领袖或我方最多1张拥有《草帽一伙》特征的角色力量+2000。',
  587. # 'card_offer_type': '补充包 继承的意志【OPC-13】', 'crawler_language': '简中'}
  588. # sql_pool.insert_one_or_dict(table="one_piece_record", data=data_dic)
  589. sql_pool.insert_many(
  590. table="jhs_product_record",
  591. data_list=[
  592. {
  593. "product_id": 99999991,
  594. "seller_username": "浣熊小助理(裸卡版)",
  595. "auction_product_name": "2000 日文 无编号 #175 U 波克比 有瑕疵",
  596. },
  597. {
  598. "product_id": 99999992,
  599. "seller_username": "测试商家二号",
  600. "auction_product_name": "中文批量插入测试",
  601. },
  602. ],
  603. ignore=False
  604. )