"""optimized_processor.py — entry points for the LST and PGVIL data-processing pipelines."""
  1. from pathlib import Path
  2. from typing import Optional
  3. import traceback
  4. import sys
  5. import os
  6. from .processors.built_in.lst import ZipCSVProcessor, RosbagProcessor, Config
  7. # from .processors.final_processor import FinalDataProcessor
  8. from core.processors.built_in.lst import (
  9. data_precheck,
  10. run_cpp_engine,
  11. FinalDataProcessor,
  12. )
  13. from core.processors.built_in.pgvil import run_pgvil_engine, PGVILProcessor
  14. from core.processors.built_in.pgvil import Config as PGVILConfig
  15. def get_base_path():
  16. """获取可执行文件所在目录"""
  17. # if getattr(sys, 'frozen', False): #pyinstaller打包后环境(pyinstaller会自动设置sys.frozen)
  18. if "__compiled__" in globals(): # nuikta会通过这个语句检测是否被Nuitka打包
  19. base_path = os.path.dirname(sys.executable) # 可执行文件目录
  20. # base_path1 = getattr(sys, '_MEIPASS', os.path.dirname(sys.executable)) # 可执行文件目录
  21. # base_path = Path(base_path1).parent
  22. print("base_path", Path(base_path))
  23. else: # 开发环境
  24. base_path = os.path.dirname('.')
  25. print("file path is", base_path)
  26. return base_path
  27. def resource_path(relative_path):
  28. """ 获取资源绝对路径,兼容开发环境和单文件模式 """
  29. if hasattr(sys, '_MEIPASS'):
  30. base_path = sys._MEIPASS
  31. else:
  32. base_path = os.path.abspath(".")
  33. return os.path.join(base_path, relative_path)
  34. def process_lst_data(
  35. zip_data_path: Path,
  36. output_base_dir: Path,
  37. trafficlight_json_path: Optional[Path] = None,
  38. utm_zone: int = 51,
  39. x_offset: float = 0.0,
  40. y_offset: float = 0.0,
  41. continue_to_iterate: bool = False,
  42. ) -> Optional[Path]:
  43. """
  44. Processes LST data using an optimized pipeline.
  45. Args:
  46. zip_data_path: Path to the input ZIP file
  47. output_base_dir: Base directory for output
  48. trafficlight_json_path: Optional path to traffic light JSON file
  49. utm_zone: UTM zone for coordinate projection
  50. x_offset: X offset for C++ engine
  51. y_offset: Y offset for C++ engine
  52. continue_to_iterate: Flag to control iteration continuation
  53. Returns:
  54. Path to the final merged_ObjState.csv file if successful, None otherwise
  55. """
  56. print(f"Starting LST data processing for: {zip_data_path.name}")
  57. # Validate input paths
  58. if not zip_data_path.exists():
  59. print(f"Error: Input ZIP file not found: {zip_data_path}")
  60. return None
  61. if not trafficlight_json_path:
  62. print(f"Warning: Traffic light JSON file not found: {trafficlight_json_path}")
  63. trafficlight_json_path = None
  64. try:
  65. base_path = get_base_path()
  66. # Initialize configuration
  67. config = Config(
  68. zip_path=zip_data_path.resolve(),
  69. output_path=output_base_dir.resolve(),
  70. json_path=(
  71. trafficlight_json_path.resolve() if trafficlight_json_path else None
  72. ),
  73. # dbc_path = os.path.join(base_path, '_internal/VBox.dbc'),
  74. # engine_path = os.path.join(base_path, '_internal/engine'),
  75. # map_path = os.path.join(base_path, '_internal/data_map'),
  76. dbc_path=resource_path('VBox.dbc'),
  77. engine_path=resource_path('engine'),
  78. map_path=resource_path('data_map'),
  79. utm_zone=utm_zone,
  80. x_offset=x_offset,
  81. y_offset=y_offset,
  82. )
  83. print("engine path is", config.engine_path)
  84. # Process built-in data types
  85. print("Processing built-in data types...")
  86. zip_processor = ZipCSVProcessor(config)
  87. zip_processor.process_zip()
  88. # Process rosbag data if available
  89. rosbag_processor = RosbagProcessor(config)
  90. rosbag_processor.process_zip_for_rosbags()
  91. # Run C++ engine for additional processing
  92. if not run_cpp_engine(config):
  93. raise RuntimeError("C++ engine execution failed")
  94. # Validate processed data
  95. if not data_precheck(config.output_dir):
  96. raise ValueError("Data quality pre-check failed")
  97. # Final processing of built-in data
  98. print("Processing and merging built-in data...")
  99. final_processor = FinalDataProcessor(config)
  100. if not final_processor.process():
  101. raise RuntimeError("Final data processing failed")
  102. final_csv_path = config.output_dir / "merged_ObjState.csv"
  103. return final_csv_path
  104. except Exception as e:
  105. print(f"Error: Processing failed for {zip_data_path.name}: {str(e)}")
  106. print(f"Debug: Stacktrace: {traceback.format_exc()}")
  107. return None
  108. def process_pgvil_data(
  109. zip_data_path: Path,
  110. output_base_dir: Path,
  111. utm_zone: int = 51,
  112. x_offset: float = 0.0,
  113. y_offset: float = 0.0,
  114. ) -> Optional[Path]:
  115. """处理PGVIL数据
  116. Args:
  117. zip_data_path: ZIP数据文件路径
  118. output_base_dir: 输出基础目录
  119. utm_zone: UTM坐标系区域
  120. x_offset: X坐标偏移量
  121. y_offset: Y坐标偏移量
  122. Returns:
  123. Optional[Path]: 处理后的CSV文件路径,处理失败则返回None
  124. """
  125. base_path = get_base_path()
  126. pgvil_config = PGVILConfig(
  127. zip_path=zip_data_path,
  128. output_path=output_base_dir,
  129. utm_zone=utm_zone,
  130. x_offset=x_offset,
  131. y_offset=y_offset,
  132. # engine_path = os.path.join(base_path, '_internal/engine'),
  133. # map_path = os.path.join(base_path, '_internal/data_map'),
  134. engine_path=resource_path('engine'),
  135. map_path=resource_path('data_map')
  136. )
  137. if not zip_data_path.exists():
  138. print(f"Error: Input ZIP file not found: {zip_data_path}")
  139. return None
  140. try:
  141. # 确保输出目录存在
  142. output_base_dir.mkdir(parents=True, exist_ok=True)
  143. processor = PGVILProcessor(pgvil_config)
  144. # 解压ZIP文件
  145. pgvil_root = processor.process_zip()
  146. if not pgvil_root.exists():
  147. raise RuntimeError("Failed to extract ZIP file")
  148. # Run C++ engine for additional processing
  149. if not run_pgvil_engine(pgvil_config):
  150. raise RuntimeError("C++ engine execution failed")
  151. merged_csv = processor.merge_csv_files()
  152. if merged_csv is None or not merged_csv.exists():
  153. raise RuntimeError("Failed to merge CSV files")
  154. print(f"merged_csv: {merged_csv}")
  155. return merged_csv
  156. except Exception as e:
  157. print(f"Error: Processing failed for {zip_data_path.name}: {str(e)}")
  158. return None