common.py
# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path
from copy import deepcopy

import joblib
import numpy as np
import pandas as pd

from .....utils.download import download
from .....utils.cache import CACHE_DIR
from ....utils.io.readers import TSReader
from ....utils.io.writers import TSWriter
from ...base import BaseComponent
from .funcs import load_from_dataframe, time_feature

__all__ = [
    "ReadTS",
    "BuildTSDataset",
    "TSCutOff",
    "TSNormalize",
    "TimeFeature",
    "TStoArray",
    "BuildPadMask",
    "ArraytoTS",
    "TSDeNormalize",
    "GetAnomaly",
    "GetCls",
]
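
# Every component below subclasses BaseComponent: it declares INPUT_KEYS /
# OUTPUT_KEYS together with DEAULT_INPUTS / DEAULT_OUTPUTS (spelling kept
# as-is to match the attribute names used across the codebase), and implements
# apply(), which takes and returns dicts keyed by those names so that the
# components can be chained into a pipeline.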


class ReadTS(BaseComponent):
    """Read a time series from a CSV path, a URL, or an in-memory DataFrame."""

    INPUT_KEYS = ["ts"]
    OUTPUT_KEYS = ["ts_path", "ts", "ori_ts"]
    DEAULT_INPUTS = {"ts": "ts"}
    DEAULT_OUTPUTS = {"ts_path": "ts_path", "ts": "ts", "ori_ts": "ori_ts"}

    def __init__(self):
        super().__init__()
        self._reader = TSReader(backend="pandas")
        self._writer = TSWriter(backend="pandas")

    def apply(self, ts):
        if not isinstance(ts, str):
            # In-memory data: persist it to the cache so a file path is available.
            ts_path = (Path(CACHE_DIR) / "predict_input" / "tmp_ts.csv").as_posix()
            self._writer.write(ts_path, ts)
            return {"ts_path": ts_path, "ts": ts, "ori_ts": deepcopy(ts)}
        ts_path = ts
        # XXX: auto download for URL inputs
        ts_path = self._download_from_url(ts_path)
        ts = self._reader.read(ts_path)
        return {"ts_path": ts_path, "ts": ts, "ori_ts": deepcopy(ts)}

    def _download_from_url(self, in_path):
        if in_path.startswith("http"):
            file_name = Path(in_path).name
            save_path = Path(CACHE_DIR) / "predict_input" / file_name
            download(in_path, save_path, overwrite=True)
            return save_path.as_posix()
        return in_path
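
# Example (illustrative sketch; the URL, file name, and `some_dataframe` are
# placeholders, not part of the module):
#
#     reader = ReadTS()
#     out = reader.apply("https://example.com/data.csv")  # downloaded, then read
#     out = reader.apply(some_dataframe)                  # cached to tmp_ts.csv first
#     out["ts"], out["ori_ts"]  # parsed series and an untouched deep copy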


class TSCutOff(BaseComponent):
    """Keep only the trailing window that the model consumes as input."""

    INPUT_KEYS = ["ts", "ori_ts"]
    OUTPUT_KEYS = ["ts", "ori_ts"]
    DEAULT_INPUTS = {"ts": "ts", "ori_ts": "ori_ts"}
    DEAULT_OUTPUTS = {"ts": "ts", "ori_ts": "ori_ts"}

    def __init__(self, size):
        super().__init__()
        self.size = size

    def apply(self, ts, ori_ts):
        skip_len = self.size.get("skip_chunk_len", 0)
        min_len = self.size["in_chunk_len"] + skip_len
        if len(ts) < min_len:
            raise ValueError(
                f"The length of the input data is {len(ts)}, but it should be "
                f"at least {min_len} for prediction."
            )
        ts_data = ts[-min_len:]
        return {"ts": ts_data, "ori_ts": ts_data}


class TSNormalize(BaseComponent):
    """Scale target/feature columns with a scaler fitted at training time."""

    INPUT_KEYS = ["ts"]
    OUTPUT_KEYS = ["ts"]
    DEAULT_INPUTS = {"ts": "ts"}
    DEAULT_OUTPUTS = {"ts": "ts"}

    def __init__(self, scale_path, params_info):
        super().__init__()
        self.scaler = joblib.load(scale_path)
        self.params_info = params_info

    def apply(self, ts):
        """Transform the configured columns in place and return the frame."""
        if self.params_info.get("target_cols", None) is not None:
            ts[self.params_info["target_cols"]] = self.scaler.transform(
                ts[self.params_info["target_cols"]]
            )
        if self.params_info.get("feature_cols", None) is not None:
            ts[self.params_info["feature_cols"]] = self.scaler.transform(
                ts[self.params_info["feature_cols"]]
            )
        return {"ts": ts}


class TSDeNormalize(BaseComponent):
    """Invert TSNormalize on the prediction frame."""

    INPUT_KEYS = ["pred"]
    OUTPUT_KEYS = ["pred"]
    DEAULT_INPUTS = {"pred": "pred"}
    DEAULT_OUTPUTS = {"pred": "pred"}

    def __init__(self, scale_path, params_info):
        super().__init__()
        self.scaler = joblib.load(scale_path)
        self.params_info = params_info

    def apply(self, pred):
        """Apply the scaler's inverse transform to every prediction column."""
        scale_cols = pred.columns.values.tolist()
        pred[scale_cols] = self.scaler.inverse_transform(pred[scale_cols])
        return {"pred": pred}


class BuildTSDataset(BaseComponent):
    """Wrap the raw DataFrame into the dataset structure used downstream."""

    INPUT_KEYS = ["ts", "ori_ts"]
    OUTPUT_KEYS = ["ts", "ori_ts"]
    DEAULT_INPUTS = {"ts": "ts", "ori_ts": "ori_ts"}
    DEAULT_OUTPUTS = {"ts": "ts", "ori_ts": "ori_ts"}

    def __init__(self, params_info):
        super().__init__()
        self.params_info = params_info

    def apply(self, ts, ori_ts):
        """Build the dataset from the DataFrame and emit it as both outputs."""
        ts_data = load_from_dataframe(ts, **self.params_info)
        return {"ts": ts_data, "ori_ts": ts_data}


class TimeFeature(BaseComponent):
    """Append calendar-derived covariates (hour, day, week, ...) to the series."""

    INPUT_KEYS = ["ts"]
    OUTPUT_KEYS = ["ts"]
    DEAULT_INPUTS = {"ts": "ts"}
    DEAULT_OUTPUTS = {"ts": "ts"}

    def __init__(self, params_info, size, holiday=False):
        super().__init__()
        self.freq = params_info["freq"]
        self.size = size
        self.holiday = holiday

    def apply(self, ts):
        """Add the basic calendar features, plus holiday features when enabled."""
        if not self.holiday:
            ts = time_feature(
                ts,
                self.freq,
                ["hourofday", "dayofmonth", "dayofweek", "dayofyear"],
                self.size["out_chunk_len"],
            )
        else:
            ts = time_feature(
                ts,
                self.freq,
                [
                    "minuteofhour",
                    "hourofday",
                    "dayofmonth",
                    "dayofweek",
                    "dayofyear",
                    "monthofyear",
                    "weekofyear",
                    "holidays",
                ],
                self.size["out_chunk_len"],
            )
        return {"ts": ts}


class BuildPadMask(BaseComponent):
    """Pad the feature window to a fixed length and emit the matching pad mask."""

    INPUT_KEYS = ["ts"]
    OUTPUT_KEYS = ["ts"]
    DEAULT_INPUTS = {"ts": "ts"}
    DEAULT_OUTPUTS = {"ts": "ts"}

    def __init__(self, input_data):
        super().__init__()
        self.input_data = input_data

    def apply(self, ts):
        if "features" in self.input_data:
            ts["features"] = ts["past_target"]
        if "pad_mask" in self.input_data:
            target_dim = len(ts["features"])
            max_length = self.input_data["pad_mask"][-1]
            if max_length > 0:
                ones = np.ones(max_length, dtype=np.int32)
                if max_length != target_dim:
                    target_ndarray = np.array(ts["features"]).astype(np.float32)
                    # Zero-pad (or truncate) along time to max_length rows. The
                    # buffer must be float32, otherwise the float features would
                    # be truncated to integers.
                    target_ndarray_final = np.zeros(
                        [max_length, target_ndarray.shape[1]], dtype=np.float32
                    )
                    end = min(target_dim, max_length)
                    target_ndarray_final[:end, :] = target_ndarray[:end, :]
                    ts["features"] = target_ndarray_final
                    ones[end:] = 0
                    ts["pad_mask"] = ones
                else:
                    ts["pad_mask"] = ones
        return {"ts": ts}


class TStoArray(BaseComponent):
    """Convert the dataset's fields into float32 arrays, ordered by field name."""

    INPUT_KEYS = ["ts"]
    OUTPUT_KEYS = ["ts"]
    DEAULT_INPUTS = {"ts": "ts"}
    DEAULT_OUTPUTS = {"ts": "ts"}

    def __init__(self, input_data):
        super().__init__()
        self.input_data = input_data

    def apply(self, ts):
        ts_list = []
        for key in sorted(self.input_data.keys()):
            ts_list.append(np.array(ts[key]).astype("float32"))
        return {"ts": ts_list}


class ArraytoTS(BaseComponent):
    """Turn the model's output array back into a time-indexed DataFrame."""

    INPUT_KEYS = ["ori_ts", "pred"]
    OUTPUT_KEYS = ["pred"]
    DEAULT_INPUTS = {"ori_ts": "ori_ts", "pred": "pred"}
    DEAULT_OUTPUTS = {"pred": "pred"}

    def __init__(self, info_params):
        super().__init__()
        self.info_params = info_params

    def apply(self, ori_ts, pred):
        pred = pred[0]
        if ori_ts.get("past_target", None) is not None:
            ts = ori_ts["past_target"]
        elif ori_ts.get("observed_cov_numeric", None) is not None:
            ts = ori_ts["observed_cov_numeric"]
        elif ori_ts.get("known_cov_numeric", None) is not None:
            ts = ori_ts["known_cov_numeric"]
        elif ori_ts.get("static_cov_numeric", None) is not None:
            ts = ori_ts["static_cov_numeric"]
        else:
            raise ValueError("No value in ori_ts")
        column_name = (
            self.info_params["target_cols"]
            if "target_cols" in self.info_params
            else self.info_params["feature_cols"]
        )
        if isinstance(self.info_params["freq"], str):
            # Datetime index: extend it by one step per predicted row.
            past_target_index = ts.index
            if past_target_index.freq is None:
                past_target_index.freq = pd.infer_freq(ts.index)
            future_target_index = pd.date_range(
                past_target_index[-1] + past_target_index.freq,
                periods=pred.shape[0],
                freq=self.info_params["freq"],
                name=self.info_params["time_col"],
            )
        elif isinstance(self.info_params["freq"], int):
            # Integer index: continue counting from the last observed position,
            # advancing by `freq` per predicted row so the index length matches.
            start_idx = max(ts.index) + 1
            stop_idx = start_idx + pred.shape[0] * self.info_params["freq"]
            future_target_index = pd.RangeIndex(
                start=start_idx,
                stop=stop_idx,
                step=self.info_params["freq"],
                name=self.info_params["time_col"],
            )
        else:
            raise ValueError(
                f"Unsupported freq type: {type(self.info_params['freq'])}"
            )
        future_target = pd.DataFrame(
            np.reshape(pred, [pred.shape[0], -1]),
            index=future_target_index,
            columns=column_name,
        )
        return {"pred": future_target}


class GetAnomaly(BaseComponent):
    """Score reconstruction error and flag points above the model threshold."""

    INPUT_KEYS = ["ori_ts", "pred"]
    OUTPUT_KEYS = ["anomaly"]
    DEAULT_INPUTS = {"ori_ts": "ori_ts", "pred": "pred"}
    DEAULT_OUTPUTS = {"anomaly": "anomaly"}

    def __init__(self, model_threshold, info_params):
        super().__init__()
        self.model_threshold = model_threshold
        self.info_params = info_params

    def apply(self, ori_ts, pred):
        pred = pred[0]
        if ori_ts.get("past_target", None) is not None:
            ts = ori_ts["past_target"]
        elif ori_ts.get("observed_cov_numeric", None) is not None:
            ts = ori_ts["observed_cov_numeric"]
        elif ori_ts.get("known_cov_numeric", None) is not None:
            ts = ori_ts["known_cov_numeric"]
        elif ori_ts.get("static_cov_numeric", None) is not None:
            ts = ori_ts["static_cov_numeric"]
        else:
            raise ValueError("No value in ori_ts")
        # Mean squared reconstruction error per time step, then thresholding.
        anomaly_score = np.mean(np.square(pred - np.array(ts)), axis=-1)
        anomaly_label = (anomaly_score >= self.model_threshold).astype(int)
        past_target_index = ts.index
        past_target_index.name = self.info_params["time_col"]
        anomaly_label = pd.DataFrame(
            np.reshape(anomaly_label, [pred.shape[0], -1]),
            index=past_target_index,
            columns=["label"],
        )
        return {"anomaly": anomaly_label}


class GetCls(BaseComponent):
    """Apply softmax to the logits and report the top class with its score."""

    INPUT_KEYS = ["pred"]
    OUTPUT_KEYS = ["classification"]
    DEAULT_INPUTS = {"pred": "pred"}
    DEAULT_OUTPUTS = {"classification": "classification"}

    def __init__(self):
        super().__init__()

    def apply(self, pred):
        pred_ts = pred[0]
        # Numerically stable softmax over the class axis.
        pred_ts -= np.max(pred_ts, axis=-1, keepdims=True)
        pred_ts = np.exp(pred_ts) / np.sum(np.exp(pred_ts), axis=-1, keepdims=True)
        classid = np.argmax(pred_ts, axis=-1)
        pred_score = pred_ts[classid]
        result = pd.DataFrame.from_dict({"classid": [classid], "score": [pred_score]})
        result.index.name = "sample"
        return {"classification": result}