__init__.py

# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path
import pickle

from ...base import BaseDatasetChecker
from .dataset_src import check, deep_analyse
from ..model_list import MODELS


class BEVFusionDatasetChecker(BaseDatasetChecker):
    entities = MODELS

    def check_dataset(self, dataset_dir: str) -> dict:
        """Check whether the dataset meets the specifications and get a dataset summary.

        Args:
            dataset_dir (str): the root directory of the dataset.

        Returns:
            dict: dataset summary.
        """
        return check(dataset_dir)

    def analyse(self, dataset_dir: str) -> dict:
        """Deeply analyse the dataset.

        Args:
            dataset_dir (str): the root directory of the dataset.

        Returns:
            dict: the deep analysis results.
        """
        return deep_analyse(dataset_dir, self.output)

    def get_data(self, ann_file, max_sample_num):
        """Collect per-sample metadata (sample token, lidar path, camera image paths)."""
        infos = self.data_infos(ann_file, max_sample_num)
        # Fixed camera ordering used when assembling each sample's image list.
        cam_orders = [
            "CAM_FRONT_LEFT",
            "CAM_FRONT",
            "CAM_FRONT_RIGHT",
            "CAM_BACK_RIGHT",
            "CAM_BACK",
            "CAM_BACK_LEFT",
        ]
        meta = []
        for info in infos:
            image_paths = []
            for cam_type in cam_orders:
                cam_info = info["cams"][cam_type]
                image_paths.append(cam_info["data_path"])
            meta.append(
                {
                    "sample_idx": info["token"],
                    "lidar_path": info["lidar_path"],
                    "image_paths": image_paths,
                }
            )
        return meta

    def data_infos(self, ann_file, max_sample_num):
        """Load the annotation pickle, sort its samples by timestamp, and keep
        at most ``max_sample_num`` of them."""
        with open(ann_file, "rb") as f:
            data = pickle.load(f)
        data_infos = sorted(data["infos"], key=lambda e: e["timestamp"])
        return data_infos[:max_sample_num]

    def get_show_type(self) -> str:
        """Get the show type of the dataset.

        Returns:
            str: show type.
        """
        return "txt"

    def get_dataset_type(self) -> str:
        """Return the dataset type.

        Returns:
            str: dataset type.
        """
        return "NuscenesMMDataset"