mysql_pool.py

# -*- coding: utf-8 -*-
# Author : Charley
# Python : 3.10.8
# Date : 2025/3/25 14:14
import re

import pymysql
import YamlLoader
from loguru import logger
from dbutils.pooled_db import PooledDB

# Load the MySQL settings from the YAML configuration
yaml = YamlLoader.readYaml()
mysqlYaml = yaml.get("mysql")
sql_host = mysqlYaml.getValueAsString("host")
sql_port = mysqlYaml.getValueAsInt("port")
sql_user = mysqlYaml.getValueAsString("username")
sql_password = mysqlYaml.getValueAsString("password")
sql_db = mysqlYaml.getValueAsString("db")


class MySQLConnectionPool:
    """
    MySQL connection pool.
    """

    def __init__(self, mincached=4, maxcached=5, maxconnections=10, log=None):
        """
        Initialize the connection pool.
        :param mincached: number of idle connections created at startup (0 means none are pre-created)
        :param maxcached: maximum number of idle connections kept in the pool (0 or None means unlimited)
        :param maxconnections: maximum number of connections allowed (0 or None means unlimited)
        :param log: custom logger; defaults to loguru's logger
        """
        # Use loguru's logger unless another logger is passed in
        self.log = log or logger
        self.pool = PooledDB(
            creator=pymysql,
            mincached=mincached,
            maxcached=maxcached,
            maxconnections=maxconnections,
            blocking=True,  # block and wait when the pool is exhausted; False would raise immediately
            host=sql_host,
            port=sql_port,
            user=sql_user,
            password=sql_password,
            database=sql_db,
            ping=0  # when to check connections with ping(): 0=never, 1=when fetched from the pool, 2=when a cursor is created, 4=when a query is executed, 7=always
        )
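
    # Illustrative usage (not part of the original module): reuse an application-level
    # loguru logger and a larger pool; the sizes shown here are arbitrary examples.
    #   from loguru import logger as app_logger
    #   pool = MySQLConnectionPool(mincached=2, maxcached=10, maxconnections=20, log=app_logger)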

    def _execute(self, query, args=None, commit=False):
        """
        Execute a SQL statement.
        :param query: SQL statement
        :param args: SQL parameters
        :param commit: whether to commit the transaction
        :return: the cursor (pymysql's default cursor buffers results, so fetchone/fetchall,
                 rowcount and lastrowid remain usable after the connection is returned to the pool)
        """
        with self.pool.connection() as conn:
            with conn.cursor() as cursor:
                try:
                    cursor.execute(query, args)
                    if commit:
                        conn.commit()
                    self.log.debug(f"sql _execute, Query: {query}, Rows: {cursor.rowcount}")
                    return cursor
                except Exception as e:
                    if commit:
                        conn.rollback()  # roll back before the connection goes back to the pool
                    self.log.exception(f"Error executing query: {e}, Query: {query}, Args: {args}")
                    raise

    def select_one(self, query, args=None):
        """
        Run a query and return a single row.
        :param query: query statement
        :param args: query parameters
        :return: one row, or None
        """
        cursor = self._execute(query, args)
        return cursor.fetchone()

    def select_all(self, query, args=None):
        """
        Run a query and return all rows.
        :param query: query statement
        :param args: query parameters
        :return: all rows
        """
        cursor = self._execute(query, args)
        return cursor.fetchall()
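
    # Illustrative usage (not part of the original module; the `user` table and its
    # columns are placeholders):
    #   pool = MySQLConnectionPool()
    #   row = pool.select_one("SELECT * FROM user WHERE id = %s", (1,))
    #   rows = pool.select_all("SELECT * FROM user WHERE status = %s", ("active",))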

    def insert_one(self, query, args):
        """
        Execute a single INSERT statement.
        :param query: insert statement
        :param args: insert parameters
        """
        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one: inserting>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        cursor = self._execute(query, args, commit=True)
        return cursor.lastrowid  # ID of the inserted row

    def insert_all(self, query, args_list):
        """
        Execute a batch INSERT; falls back to row-by-row inserts on failure.
        :param query: insert statement
        :param args_list: list of parameter tuples
        """
        conn = None
        cursor = None
        try:
            conn = self.pool.connection()
            cursor = conn.cursor()
            cursor.executemany(query, args_list)
            conn.commit()
            self.log.debug(f"sql insert_all, SQL: {query}, Rows: {len(args_list)}")
            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_all: inserting>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        except Exception as e:
            if conn:
                conn.rollback()
            self.log.error(f"Batch insertion failed. Trying single inserts. Error: {e}")
            # If the batch insert fails, insert the rows one by one
            rowcount = 0
            for args in args_list:
                self.insert_one(query, args)
                rowcount += 1
            self.log.debug(f"Batch insertion failed. Inserted {rowcount} rows individually.")
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()

    def insert_one_or_dict(self, table=None, data=None, query=None, args=None, commit=True, ignore=False):
        """
        Single-row insert (accepts either a dict or a raw SQL statement).
        :param table: table name (required for dict inserts)
        :param data: dict of {column: value}
        :param query: raw SQL statement (mutually exclusive with data)
        :param args: SQL parameters (required when query is used)
        :param commit: whether to commit automatically
        :param ignore: whether to use INSERT IGNORE
        :return: last inserted ID, or -1 if a duplicate entry was skipped
        """
        if data is not None:
            if not isinstance(data, dict):
                raise ValueError("Data must be a dictionary")
            keys = ', '.join([self._safe_identifier(k) for k in data.keys()])
            values = ', '.join(['%s'] * len(data))
            # Build an INSERT / INSERT IGNORE statement
            ignore_clause = "IGNORE " if ignore else ""
            query = f"INSERT {ignore_clause}INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
            args = tuple(data.values())
        elif query is None:
            raise ValueError("Either data or query must be provided")
        try:
            cursor = self._execute(query, args, commit)
            self.log.info(f"sql insert_one_or_dict, Table: {table}, Rows: {cursor.rowcount}")
            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data insert_one_or_dict: inserting>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
            return cursor.lastrowid
        except pymysql.err.IntegrityError as e:
            if "Duplicate entry" in str(e):
                self.log.warning(f"Insert skipped: duplicate entry. Details: {e}")
                return -1  # -1 means the duplicate entry was skipped
            self.log.exception(f"Database integrity error: {e}")
            raise
        except Exception as e:
            self.log.exception(f"Unexpected error: {e}")  # log the full traceback
            raise
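
    # Illustrative usage (not part of the original module; table and column names are
    # placeholders, and `pool` is assumed to be a MySQLConnectionPool instance):
    #   new_id = pool.insert_one_or_dict(table="user", data={"name": "Tom", "age": 18}, ignore=True)
    #   # builds "INSERT IGNORE INTO user (name, age) VALUES (%s, %s)" and binds ("Tom", 18)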

    def insert_many(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True, ignore=False):
        """
        Batch insert (accepts either a list of dicts or a raw SQL statement).
        :param table: table name (required for dict inserts)
        :param data_list: list of dicts [{column: value}]
        :param query: raw SQL statement (mutually exclusive with data_list)
        :param args_list: list of parameter tuples (required when query is used)
        :param batch_size: batch size
        :param commit: whether to commit automatically
        :param ignore: whether to use INSERT IGNORE
        :return: number of affected rows
        """
        if data_list is not None:
            if not data_list or not isinstance(data_list[0], dict):
                raise ValueError("Data_list must be a non-empty list of dictionaries")
            keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
            values = ', '.join(['%s'] * len(data_list[0]))
            # Build an INSERT / INSERT IGNORE statement
            ignore_clause = "IGNORE " if ignore else ""
            query = f"INSERT {ignore_clause}INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
            args_list = [tuple(d.values()) for d in data_list]
        elif query is None:
            raise ValueError("Either data_list or query must be provided")
        total = 0
        for i in range(0, len(args_list), batch_size):
            batch = args_list[i:i + batch_size]
            with self.pool.connection() as conn:
                with conn.cursor() as cursor:
                    try:
                        cursor.executemany(query, batch)
                        if commit:
                            conn.commit()
                        total += cursor.rowcount
                    except pymysql.Error as e:
                        if commit:
                            conn.rollback()  # roll back before the connection goes back to the pool
                        if "Duplicate entry" in str(e):
                            # Re-raise duplicate-entry errors so the caller can decide how to handle them
                            raise
                        self.log.exception(f"Database error: {e}")
                        raise
        if table:
            self.log.info(f"sql insert_many, Table: {table}, Total Rows: {total}")
        else:
            self.log.info(f"sql insert_many, Query: {query}, Total Rows: {total}")
        return total
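
    # Illustrative usage (not part of the original module; table and column names are
    # placeholders, and `pool` is assumed to be a MySQLConnectionPool instance):
    #   rows = [{"name": "Tom", "age": 18}, {"name": "Jerry", "age": 20}]
    #   affected = pool.insert_many(table="user", data_list=rows, batch_size=500, ignore=True)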

    def insert_many_two(self, table=None, data_list=None, query=None, args_list=None, batch_size=1000, commit=True):
        """
        Batch insert (accepts either a list of dicts or a raw SQL statement);
        failed batches fall back to row-by-row inserts.
        :param table: table name (required for dict inserts)
        :param data_list: list of dicts [{column: value}]
        :param query: raw SQL statement (mutually exclusive with data_list)
        :param args_list: list of parameter tuples (required when query is used)
        :param batch_size: batch size
        :param commit: whether to commit automatically
        :return: number of affected rows
        """
        if data_list is not None:
            if not data_list or not isinstance(data_list[0], dict):
                raise ValueError("Data_list must be a non-empty list of dictionaries")
            keys = ', '.join([self._safe_identifier(k) for k in data_list[0].keys()])
            values = ', '.join(['%s'] * len(data_list[0]))
            query = f"INSERT INTO {self._safe_identifier(table)} ({keys}) VALUES ({values})"
            args_list = [tuple(d.values()) for d in data_list]
        elif query is None:
            raise ValueError("Either data_list or query must be provided")
        total = 0
        for i in range(0, len(args_list), batch_size):
            batch = args_list[i:i + batch_size]
            try:
                with self.pool.connection() as conn:
                    with conn.cursor() as cursor:
                        try:
                            cursor.executemany(query, batch)
                            if commit:
                                conn.commit()
                            total += cursor.rowcount
                        except Exception:
                            if commit:
                                conn.rollback()  # roll back before the connection goes back to the pool
                            raise
            except Exception as e:
                self.log.exception(f"Batch insert failed: {e}")  # log with full traceback
                self.log.error(f"Failed SQL: {query}, Args count: {len(batch)}")
                # Fall back to row-by-row inserts and log each individual failure
                rowcount = 0
                for args in batch:
                    try:
                        self.insert_one(query, args)
                        rowcount += 1
                    except Exception as e2:
                        self.log.error(f"Single insert failed: {e2}, Args: {args}")
                total += rowcount
                self.log.debug(f"Inserted {rowcount}/{len(batch)} rows individually.")
        self.log.info(f"sql insert_many_two, Table: {table}, Total Rows: {total}")
        return total

    def insert_too_many(self, query, args_list, batch_size=1000):
        """
        Batch insert committed in chunks, intended for very large loads (100k+ rows);
        failed chunks fall back to row-by-row inserts.
        :param query: insert statement
        :param args_list: list of parameter tuples
        :param batch_size: rows per chunk
        """
        self.log.info(f"sql insert_too_many, Query: {query}, Total Rows: {len(args_list)}")
        for i in range(0, len(args_list), batch_size):
            batch = args_list[i:i + batch_size]
            try:
                with self.pool.connection() as conn:
                    with conn.cursor() as cursor:
                        cursor.executemany(query, batch)
                        conn.commit()
                self.log.debug(f"insert_too_many -> Total Rows: {len(batch)}")
            except Exception as e:
                self.log.error(f"insert_too_many error. Trying single inserts. Error: {e}")
                # Fall back to row-by-row inserts for the current chunk
                for args in batch:
                    self.insert_one(query, args)
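
    # Illustrative usage (not part of the original module; the statement, table and
    # `big_args_list` are placeholders):
    #   pool.insert_too_many("INSERT INTO user (name, age) VALUES (%s, %s)", big_args_list, batch_size=1000)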

    def update_one(self, query, args):
        """
        Execute a single UPDATE statement.
        :param query: update statement
        :param args: update parameters
        """
        self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_one: updating>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        return self._execute(query, args, commit=True)

    def update_all(self, query, args_list):
        """
        Execute a batch UPDATE; falls back to row-by-row updates on failure.
        :param query: update statement
        :param args_list: list of parameter tuples
        """
        conn = None
        cursor = None
        try:
            conn = self.pool.connection()
            cursor = conn.cursor()
            cursor.executemany(query, args_list)
            conn.commit()
            self.log.debug(f"sql update_all, SQL: {query}, Rows: {len(args_list)}")
            self.log.info('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>data update_all: updating>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        except Exception as e:
            if conn:
                conn.rollback()
            self.log.error(f"Error executing query: {e}")
            # If the batch update fails, update the rows one by one
            rowcount = 0
            for args in args_list:
                self.update_one(query, args)
                rowcount += 1
            self.log.debug(f'Batch update failed. Updated {rowcount} rows individually.')
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()

    def update_one_or_dict(self, table=None, data=None, condition=None, query=None, args=None, commit=True):
        """
        Single-row update (accepts either a dict or a raw SQL statement).
        :param table: table name (required in dict mode)
        :param data: dict of {column: value} (mutually exclusive with query)
        :param condition: update condition, in any of these forms:
            - dict: {"id": 1} -> "WHERE id = %s"
            - string: "id = 1" -> "WHERE id = 1" (caller is responsible for safety)
            - tuple: ("id = %s", [1]) -> "WHERE id = %s" (parameterized)
        :param query: raw SQL statement (mutually exclusive with data)
        :param args: SQL parameters (required in query mode)
        :param commit: whether to commit automatically
        :return: number of affected rows
        :raises ValueError: if parameter validation fails
        """
        # Validate parameters
        if data is not None:
            if not isinstance(data, dict):
                raise ValueError("Data must be a dictionary")
            if table is None:
                raise ValueError("Table name is required for dictionary update")
            if condition is None:
                raise ValueError("Condition is required for dictionary update")
            # Build the SET clause
            set_clause = ", ".join([f"{self._safe_identifier(k)} = %s" for k in data.keys()])
            set_values = list(data.values())
            # Parse the condition into a WHERE clause and its parameters
            condition_clause, condition_args = self._parse_condition(condition)
            query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
            args = set_values + condition_args
        elif query is None:
            raise ValueError("Either data or query must be provided")
        # Execute the update
        cursor = self._execute(query, args, commit)
        return cursor.rowcount
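
    # Illustrative usage (not part of the original module; table and column names are
    # placeholders, and `pool` is assumed to be a MySQLConnectionPool instance):
    #   affected = pool.update_one_or_dict(table="user", data={"age": 19}, condition={"name": "Tom"})
    #   # builds "UPDATE user SET age = %s WHERE name = %s" and binds [19, "Tom"]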

    def _parse_condition(self, condition):
        """
        Normalize a condition into a (clause, args) pair.
        :param condition: dict / str / (clause, args) tuple
        :return: (str, list) SQL clause and parameter list
        """
        if isinstance(condition, dict):
            clause = " AND ".join([f"{self._safe_identifier(k)} = %s" for k in condition.keys()])
            args = list(condition.values())
        elif isinstance(condition, str):
            clause = condition  # note: the caller must ensure this string is safe
            args = []
        elif isinstance(condition, (tuple, list)) and len(condition) == 2:
            clause, args = condition[0], condition[1]
            if not isinstance(args, (list, tuple)):
                args = [args]
        else:
            raise ValueError("Condition must be dict/str/(clause, args)")
        return clause, list(args)  # always return a list so it can be concatenated with SET values
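
    # Condition forms accepted by _parse_condition (illustrative values):
    #   {"id": 1}           -> ("id = %s", [1])
    #   "id = 1"            -> ("id = 1", [])        # caller must ensure this string is safe
    #   ("id > %s", [100])  -> ("id > %s", [100])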

    def update_many(self, table=None, data_list=None, condition_list=None, query=None, args_list=None, batch_size=500,
                    commit=True):
        """
        Batch update (accepts either a list of dicts or a raw SQL statement).
        :param table: table name (required for dict updates)
        :param data_list: list of dicts [{column: value}]
        :param condition_list: list of conditions (dicts only, same length as data_list)
        :param query: raw SQL statement (mutually exclusive with data_list)
        :param args_list: list of parameter tuples (required when query is used)
        :param batch_size: batch size
        :param commit: whether to commit automatically
        :return: number of affected rows
        """
        if data_list is not None:
            if not data_list or not isinstance(data_list[0], dict):
                raise ValueError("Data_list must be a non-empty list of dictionaries")
            if condition_list is None or len(data_list) != len(condition_list):
                raise ValueError("Condition_list must be provided and match the length of data_list")
            if not all(isinstance(cond, dict) for cond in condition_list):
                raise ValueError("All elements in condition_list must be dictionaries")
            # Keys of the first data item and condition item, used to check batch consistency
            first_data_keys = set(data_list[0].keys())
            first_cond_keys = set(condition_list[0].keys())
            # Build the base SQL
            set_clause = ', '.join([self._safe_identifier(k) + ' = %s' for k in data_list[0].keys()])
            condition_clause = ' AND '.join([self._safe_identifier(k) + ' = %s' for k in condition_list[0].keys()])
            base_query = f"UPDATE {self._safe_identifier(table)} SET {set_clause} WHERE {condition_clause}"
            total = 0
            # Process in batches
            for i in range(0, len(data_list), batch_size):
                batch_data = data_list[i:i + batch_size]
                batch_conds = condition_list[i:i + batch_size]
                batch_args = []
                # Check whether every item in this batch has the same structure
                can_batch = True
                for data, cond in zip(batch_data, batch_conds):
                    data_keys = set(data.keys())
                    cond_keys = set(cond.keys())
                    if data_keys != first_data_keys or cond_keys != first_cond_keys:
                        can_batch = False
                        break
                    batch_args.append(tuple(data.values()) + tuple(cond.values()))
                if not can_batch:
                    # Inconsistent structure: fall back to single-row updates
                    for data, cond in zip(batch_data, batch_conds):
                        self.update_one_or_dict(table=table, data=data, condition=cond, commit=commit)
                        total += 1
                    continue
                # Execute the batch update
                try:
                    with self.pool.connection() as conn:
                        with conn.cursor() as cursor:
                            try:
                                cursor.executemany(base_query, batch_args)
                                if commit:
                                    conn.commit()
                                total += cursor.rowcount
                                self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
                            except Exception:
                                if commit:
                                    conn.rollback()  # roll back before the connection goes back to the pool
                                raise
                except Exception as e:
                    self.log.error(f"Batch update failed: {e}")
                    # Fall back to single-row updates
                    for args, data, cond in zip(batch_args, batch_data, batch_conds):
                        try:
                            self._execute(base_query, args, commit=commit)
                            total += 1
                        except Exception as e2:
                            self.log.error(f"Single update failed: {e2}, Data: {data}, Condition: {cond}")
            self.log.info(f"Total updated rows: {total}")
            return total
        elif query is not None:
            # Raw SQL with a list of parameter tuples
            if args_list is None:
                raise ValueError("args_list must be provided when using query")
            total = 0
            for i in range(0, len(args_list), batch_size):
                batch_args = args_list[i:i + batch_size]
                try:
                    with self.pool.connection() as conn:
                        with conn.cursor() as cursor:
                            try:
                                cursor.executemany(query, batch_args)
                                if commit:
                                    conn.commit()
                                total += cursor.rowcount
                                self.log.debug(f"Batch update succeeded. Rows: {cursor.rowcount}")
                            except Exception:
                                if commit:
                                    conn.rollback()  # roll back before the connection goes back to the pool
                                raise
                except Exception as e:
                    self.log.error(f"Batch update failed: {e}")
                    # Fall back to single-row updates
                    for args in batch_args:
                        try:
                            self._execute(query, args, commit=commit)
                            total += 1
                        except Exception as e2:
                            self.log.error(f"Single update failed: {e2}, Args: {args}")
            self.log.info(f"Total updated rows: {total}")
            return total
        else:
            raise ValueError("Either data_list or query must be provided")
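
    # Illustrative usage (not part of the original module; table and column names are
    # placeholders, and `pool` is assumed to be a MySQLConnectionPool instance):
    #   data_list = [{"age": 19}, {"age": 21}]
    #   condition_list = [{"name": "Tom"}, {"name": "Jerry"}]
    #   affected = pool.update_many(table="user", data_list=data_list, condition_list=condition_list)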

    def check_pool_health(self):
        """
        Check that the pool can hand out a working connection.
        Example usage:
            # Configure the MySQL connection pool
            sql_pool = MySQLConnectionPool(log=log)
            if not sql_pool.check_pool_health():
                log.error("Database connection pool is unhealthy")
                raise RuntimeError("Database connection pool is unhealthy")
        """
        try:
            with self.pool.connection() as conn:
                conn.ping(reconnect=True)
            return True
        except Exception as e:
            self.log.error(f"Connection pool health check failed: {e}")
            return False

    @staticmethod
    def _safe_identifier(name):
        """Validate a SQL identifier (table or column name) to guard against injection."""
        if not re.match(r'^[a-zA-Z_][a-zA-Z0-9_]*$', name):
            raise ValueError(f"Invalid SQL identifier: {name}")
        return name


if __name__ == '__main__':
    sql_pool = MySQLConnectionPool()
    data_dic = {'card_type_id': 111, 'card_type_name': '补充包 继承的意志【OPC-13】', 'card_type_position': 964,
                'card_id': 5284, 'card_name': '蒙奇·D·路飞', 'card_number': 'OP13-001', 'card_rarity': 'L',
                'card_img': 'https://source.windoent.com/OnePiecePc/Picture/1757929283612OP13-001.png',
                'card_life': '4', 'card_attribute': '打', 'card_power': '5000', 'card_attack': '-',
                'card_color': '红/绿', 'subscript': 4, 'card_features': '超新星/草帽一伙',
                'card_text_desc': '【咚!!×1】【对方的攻击时】我方处于活跃状态的咚!!不多于5张的场合,可以将我方任意张数的咚!!转为休息状态。每有1张转为休息状态的咚!!,本次战斗中,此领袖或我方最多1张拥有《草帽一伙》特征的角色力量+2000。',
                'card_offer_type': '补充包 继承的意志【OPC-13】', 'crawler_language': '简中'}
    sql_pool.insert_one_or_dict(table="one_piece_record", data=data_dic)