# data_manager.py

import pandas as pd
import json
from typing import List, Dict, Any, Tuple
import os
import csv
import io


class DataManager:
    """Unified management of data loading, validation and preprocessing."""

    # Field mapping (each canonical field supports several aliases)
    FIELD_MAPPING = {
        'txId': ['txId', 'transaction_id', '交易ID', 'id'],
        'txDate': ['txDate', 'transaction_date', '交易日期', 'date'],
        'txTime': ['txTime', 'transaction_time', '交易时间', 'time'],
        'txAmount': ['txAmount', 'amount', '交易金额', '金额'],
        'txBalance': ['txBalance', 'balance', '余额', '交易后余额'],
        'txDirection': ['txDirection', 'direction', '交易方向', '收支'],
        'txSummary': ['txSummary', 'summary', '交易摘要', '摘要', '说明'],
        'txCounterparty': ['txCounterparty', 'counterparty', '交易对手', '对方账户'],
        'createdAt': ['createdAt', 'created_at', '创建时间']
    }

    # Required fields
    REQUIRED_FIELDS = ['txId', 'txDate', 'txTime', 'txAmount', 'txDirection',
                       'txBalance', 'txSummary', 'txCounterparty']

    @staticmethod
    def display_data_info(df: pd.DataFrame):
        """Print basic information about a standardized DataFrame."""
        print("\n📊 Standardized data overview:")
        print(f"  Total records: {len(df)}")
        print(f"  Number of columns: {len(df.columns)}")

    @staticmethod
    def load_from_standardized_csv(csv_path: str) -> pd.DataFrame:
        """Load data from a standardized CSV file."""
        try:
            if not os.path.exists(csv_path):
                print(f"Standardized CSV file does not exist: {csv_path}")
                raise ValueError(f"Standardized CSV file does not exist: {csv_path}")
            # Read the standardized CSV file
            df = pd.read_csv(csv_path)
            print(f"✅ Standardized CSV file loaded: {len(df)} rows")
            # Validate required fields
            missing_columns = [col for col in DataManager.REQUIRED_FIELDS if col not in df.columns]
            if missing_columns:
                raise ValueError(f"CSV file is missing required fields: {missing_columns}")
            # Display basic data info
            DataManager.display_data_info(df)
            return df
        except Exception as e:
            print(f"❌ Failed to load standardized CSV file: {e}")
            raise
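
    # Minimal usage sketch (the CSV path below is hypothetical, not shipped with this module):
    #
    #   df = DataManager.load_from_standardized_csv("data_files/standard.csv")
    #   incomes = df[df["txDirection"] == "收入"]   # filter credit rows
    #   print(incomes["txAmount"].sum())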

    @staticmethod
    def load_from_file(file_path: str) -> Tuple[List[Dict[str, Any]], pd.DataFrame]:
        """Load data from a JSON file, resolving field aliases to canonical names."""
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"Data file does not exist: {file_path}")
        with open(file_path, 'r', encoding='utf-8') as f:
            raw_data = json.load(f)
        if not isinstance(raw_data, list):
            raise ValueError("The JSON file content must be an array")
        if not raw_data:
            raise ValueError("The data file is empty")
        # Standardize field names
        standardized_data = []
        for record in raw_data:
            standardized_record = {}
            for std_field, possible_names in DataManager.FIELD_MAPPING.items():
                for name in possible_names:
                    if name in record:
                        standardized_record[std_field] = record[name]
                        break
            # Keep fields from the original record that were not mapped
            for key, value in record.items():
                if key not in [name for names in DataManager.FIELD_MAPPING.values() for name in names]:
                    standardized_record[key] = value
            standardized_data.append(standardized_record)
        # Convert to a DataFrame and optimize dtypes
        df = pd.DataFrame(standardized_data)
        df = DataManager._optimize_dataframe(df)
        return standardized_data, df
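
    # Sketch of the alias resolution above, assuming a hypothetical transactions.json whose
    # records use the Chinese aliases ("交易ID", "金额", ...) instead of the canonical names:
    #
    #   records, df = DataManager.load_from_file("transactions.json")
    #   print(records[0]["txId"], records[0]["txAmount"])   # canonical keys after mapping;
    #                                                       # unmapped original keys are kept as-is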

    @staticmethod
    def _optimize_dataframe(df: pd.DataFrame) -> pd.DataFrame:
        """Optimize DataFrame column dtypes."""
        # Date field
        if 'txDate' in df.columns:
            df['txDate'] = pd.to_datetime(df['txDate'], errors='coerce').dt.date
        # Time field
        if 'txTime' in df.columns:
            df['txTime'] = df['txTime'].astype(str)
        # Amount fields
        for col in ['txAmount', 'txBalance']:
            if col in df.columns:
                df[col] = pd.to_numeric(df[col], errors='coerce')
        # Creation timestamp
        if 'createdAt' in df.columns:
            df['createdAt'] = pd.to_datetime(df['createdAt'], errors='coerce')
        # Categorical field
        if 'txDirection' in df.columns:
            df['txDirection'] = df['txDirection'].astype('category')
        return df
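
    # Rough sketch of the dtypes expected after _optimize_dataframe (not an exhaustive contract):
    #   txDate      -> Python date objects (unparseable dates coerced to NaT)
    #   txTime      -> str
    #   txAmount,
    #   txBalance   -> numeric via pd.to_numeric (unparseable values become NaN)
    #   createdAt   -> datetime64[ns]
    #   txDirection -> category
    #
    #   print(DataManager._optimize_dataframe(df).dtypes)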

    @staticmethod
    def validate_data_schema(data: List[Dict[str, Any]]) -> Tuple[bool, List[str]]:
        """Validate the data schema against the required fields."""
        errors = []
        if not data:
            return False, ["The dataset is empty"]
        # Check required fields on the first record
        first_record = data[0]
        missing_fields = []
        for field in DataManager.REQUIRED_FIELDS:
            if field not in first_record:
                missing_fields.append(field)
        if missing_fields:
            errors.append(f"Missing required fields: {', '.join(missing_fields)}")
        return len(errors) == 0, errors
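
    # Usage sketch: validation only inspects the first record, so a partially malformed
    # dataset can still pass (the record below is hypothetical):
    #
    #   ok, errors = DataManager.validate_data_schema([{"txId": "TX1"}])
    #   if not ok:
    #       print("; ".join(errors))   # e.g. "Missing required fields: txDate, txTime, ..."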

    @staticmethod
    def format_data_summary(data: List[Dict[str, Any]]) -> str:
        """Build a one-line data summary."""
        if not data:
            return "The dataset is empty"
        df = pd.DataFrame(data)
        summary = []
        summary.append(f"Total records: {len(data)}")
        if 'txDate' in df.columns:
            summary.append(f"Date range: {df['txDate'].min()} to {df['txDate'].max()}")
        if 'txAmount' in df.columns:
            summary.append(f"Amount range: {df['txAmount'].min()} to {df['txAmount'].max()}")
        if 'txDirection' in df.columns:
            direction_counts = df['txDirection'].value_counts().to_dict()
            summary.append(f"Direction counts: {direction_counts}")
        return " | ".join(summary)

    @staticmethod
    def load_data_from_csv_file(file_path: str) -> List[Dict[str, Any]]:
        """
        Load data from a CSV file, converting numeric fields automatically.
        :param file_path: absolute path of the CSV file
        :return: list of processed records
        """
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"Data file does not exist: {file_path}")
        with open(file_path, 'r', encoding='utf-8') as f:
            reader = csv.DictReader(f)
            json_list = [row for row in reader]
        if not json_list:
            raise ValueError("The data file is empty")
        # Fields that should be converted to numbers
        numeric_fields = ['txAmount', 'txBalance']
        # Convert numeric fields on every record
        for record in json_list:
            for field in numeric_fields:
                if field in record and record[field] is not None and record[field] != '':
                    try:
                        # Convert to float; use int when the value is integral
                        value = float(record[field])
                        if value == int(value):
                            record[field] = int(value)
                        else:
                            record[field] = value
                    except (ValueError, TypeError):
                        # Keep the original string if conversion fails
                        pass
        return json_list
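
    # Usage sketch, assuming a hypothetical CSV with txAmount/txBalance columns:
    #
    #   rows = DataManager.load_data_from_csv_file("/abs/path/transactions.csv")
    #   print(type(rows[0]["txAmount"]))   # int or float instead of the raw CSV string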

    @staticmethod
    def write_json_to_csv(json_data, csv_file_path, field_order=None) -> bool:
        """
        Write a JSON object with a [{}] structure to a CSV file, optionally enforcing a field order.
        :param json_data: JSON object with a [{}] structure, e.g. [{
            "txId": "TX202301050001",
            "txDate": "2023-01-05",
            "txTime": "09:15",
            "txAmount": 3200,
            "txBalance": 3200,
            "txDirection": "收入",
            "txSummary": "水稻销售收入 (优质粳稻)",
            "txCounterparty": "金穗粮食贸易公司",
            "createdAt": "2025-11-30 05:57"
        }]
        :param csv_file_path: path of the CSV file
        :param field_order: list defining the column order, e.g. ["txId", "txDate", "txTime"...]; if omitted, the keys of the first record are used
        :return: True on success, False on failure
        """
        succ = True
        try:
            # Deep-copy/normalize the input via a JSON round-trip
            data = json.loads(json.dumps(json_data))
            # Check that the data is not empty
            if not data:
                print("The JSON data is empty; nothing to write to the CSV file")
                return False
            # If no field order is given, use the keys of the first record
            if field_order is None:
                field_order = list(data[0].keys())
            # Open the CSV file and write the data
            with open(csv_file_path, mode='w', newline='', encoding='utf-8') as csv_file:
                writer = csv.DictWriter(csv_file, fieldnames=field_order)
                # Write the header row
                writer.writeheader()
                # Write the data rows
                writer.writerows(data)
            print(f"Data successfully written to {csv_file_path}")
        except Exception as e:
            print(f"Error while writing the CSV file: {e}")
            import traceback
            traceback.print_exc()
            succ = False
        return succ
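
    # Round-trip sketch (hypothetical output path and sample record):
    #
    #   sample = [{"txId": "TX1", "txAmount": 3200, "txDirection": "收入"}]
    #   if DataManager.write_json_to_csv(sample, "out.csv", field_order=["txId", "txAmount", "txDirection"]):
    #       rows = DataManager.load_data_from_csv_file("out.csv")   # txAmount parsed back to 3200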

    @staticmethod
    def json_to_csv_string(json_data: List[Dict[str, Any]], fieldnames: List[str]):
        """
        Convert JSON data (in [{}] format) to a CSV-formatted string using the given field order.
        :param json_data: JSON data in [{}] format
        :param fieldnames: list defining the column order
        :return: CSV-formatted string
        """
        # Check that the input data is not empty
        if not json_data:
            raise ValueError("The JSON data is empty")
        # Check that the field order is not empty
        if not fieldnames:
            raise ValueError("The field order must not be empty")
        # Build the CSV string with StringIO
        output = io.StringIO()
        writer = csv.DictWriter(output, fieldnames=fieldnames)
        # Write the header row
        writer.writeheader()
        # Write each data row
        for item in json_data:
            writer.writerow(item)
        # Retrieve the generated CSV string
        csv_string = output.getvalue()
        output.close()
        return csv_string
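
    # In-memory variant sketch: same row structure as write_json_to_csv, but returning a CSV
    # string instead of writing a file (useful when the CSV is returned from an API):
    #
    #   rows = [{"txId": "TX1", "txAmount": 3200}]
    #   csv_text = DataManager.json_to_csv_string(rows, fieldnames=["txId", "txAmount"])
    #   print(csv_text)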


if __name__ == '__main__':
    csv_file = "data_files/11111_data_standard_20260113_112906.csv"
    curr_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(curr_dir, "..", "..", csv_file)
    print(f"{file_path}, exists: {os.path.exists(file_path)}")
    df = DataManager.load_from_standardized_csv(file_path)
    print(f"Records: {df}")