
Upload files to 'core'

Add pgvil module
likaixin10086 4 days ago
parent commit 392adb36ae
1 changed file with 54 additions and 71 deletions

core/optimized_processor.py (+54, -71)

@@ -3,18 +3,25 @@ from typing import Optional
 import traceback
 
 from .processors.built_in.lst import ZipCSVProcessor, RosbagProcessor, Config
+
 # from .processors.final_processor import FinalDataProcessor
-from core.processors.built_in.lst import data_precheck, run_cpp_engine, FinalDataProcessor
+from core.processors.built_in.lst import (
+    data_precheck,
+    run_cpp_engine,
+    FinalDataProcessor,
+)
+from core.processors.built_in.pgvil import run_pgvil_engine, PGVILProcessor
+from core.processors.built_in.pgvil import Config as PGVILConfig
 
 
 def process_lst_data(
-        zip_data_path: Path,
-        output_base_dir: Path,
-        trafficlight_json_path: Optional[Path] = None,
-        utm_zone: int = 51,
-        x_offset: float = 0.0,
-        y_offset: float = 0.0,
-        continue_to_iterate: bool = False,
+    zip_data_path: Path,
+    output_base_dir: Path,
+    trafficlight_json_path: Optional[Path] = None,
+    utm_zone: int = 51,
+    x_offset: float = 0.0,
+    y_offset: float = 0.0,
+    continue_to_iterate: bool = False,
 ) -> Optional[Path]:
     """
     Processes LST data using an optimized pipeline.
@@ -47,13 +54,15 @@ def process_lst_data(
         config = Config(
             zip_path=zip_data_path.resolve(),
             output_path=output_base_dir.resolve(),
-            json_path=trafficlight_json_path.resolve() if trafficlight_json_path else None,
+            json_path=(
+                trafficlight_json_path.resolve() if trafficlight_json_path else None
+            ),
             dbc_path=Path("_internal/VBox.dbc").resolve(),
             engine_path=Path("_internal/engine").resolve(),
             map_path=Path("_internal/data_map").resolve(),
             utm_zone=utm_zone,
             x_offset=x_offset,
-            y_offset=y_offset
+            y_offset=y_offset,
         )
 
         # Process built-in data types
@@ -79,7 +88,6 @@ def process_lst_data(
 
         if not final_processor.process():
             raise RuntimeError("Final data processing failed")
-
         final_csv_path = config.output_dir / "merged_ObjState.csv"
 
         return final_csv_path
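
For context, a minimal sketch of how the refactored process_lst_data entry point might be called; the data paths below are hypothetical placeholders, and the sketch assumes core is importable as a package:

from pathlib import Path

from core.optimized_processor import process_lst_data

# Hypothetical input archive and output directory; adjust to the real layout.
merged = process_lst_data(
    zip_data_path=Path("data/lst_run_001.zip"),
    output_base_dir=Path("output/lst_run_001"),
    trafficlight_json_path=None,  # optional; resolved only when provided
    utm_zone=51,
    x_offset=0.0,
    y_offset=0.0,
)
if merged is not None:
    print(f"Merged ObjState CSV: {merged}")
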
@@ -91,11 +99,11 @@ def process_lst_data(
 
 
 def process_pgvil_data(
-        zip_data_path: Path,
-        output_base_dir: Path,
-        utm_zone: int = 51,
-        x_offset: float = 0.0,
-        y_offset: float = 0.0
+    zip_data_path: Path,
+    output_base_dir: Path,
+    utm_zone: int = 51,
+    x_offset: float = 0.0,
+    y_offset: float = 0.0,
 ) -> Optional[Path]:
     """处理PGVIL数据
 
@@ -109,65 +117,40 @@ def process_pgvil_data(
     Returns:
         Optional[Path]: Path to the processed CSV file, or None if processing fails
     """
+    pgvil_config = PGVILConfig(
+        zip_path=zip_data_path,
+        output_path=output_base_dir,
+        utm_zone=utm_zone,
+        x_offset=x_offset,
+        y_offset=y_offset,
+        engine_path=Path("_internal/engine").resolve(),
+        map_path=Path("_internal/data_map").resolve(),
+    )
+
+    if not zip_data_path.exists():
+        print(f"Error: Input ZIP file not found: {zip_data_path}")
+        return None
     try:
         # Ensure the output directory exists
         output_base_dir.mkdir(parents=True, exist_ok=True)
-
+        processor = PGVILProcessor(pgvil_config)
         # Extract the ZIP file
-        if not extract_zip_file(zip_data_path, output_base_dir):
-            return None
-
-        # Find all PGVIL data files
-        pgvil_files = []
-        for root, _, files in os.walk(output_base_dir):
-            for file in files:
-                if file.lower().endswith(('.csv', '.json')):
-                    pgvil_files.append(Path(root) / file)
-
-        if not pgvil_files:
-            print(f"在 {output_base_dir} 中未找到PGVIL数据文件")
-            return None
-
-        print(f"找到 {len(pgvil_files)} 个PGVIL数据文件")
-
-        # 处理所有PGVIL文件
-        all_data = []
-        for pgvil_file in pgvil_files:
-            try:
-                # 读取PGVIL文件
-                if pgvil_file.suffix.lower() == '.csv':
-                    df = pd.read_csv(pgvil_file)
-                elif pgvil_file.suffix.lower() == '.json':
-                    with open(pgvil_file, 'r') as f:
-                        data = json.load(f)
-                    df = pd.DataFrame(data)
-
-                # Ensure the required columns exist
-                required_cols = ['simTime', 'simFrame', 'playerId']
-                for col in required_cols:
-                    if col not in df.columns:
-                        df[col] = 0  # add a default value
-
-                all_data.append(df)
-                print(f"Successfully processed file: {pgvil_file}")
-            except Exception as e:
-                print(f"Error while processing file {pgvil_file}: {e}")
-
-        if not all_data:
-            print("没有成功处理任何PGVIL文件")
-            return None
-
-        # Merge all data
-        combined_df = pd.concat(all_data, ignore_index=True)
-
-        # Save the processed data
-        output_path = output_base_dir / "processed_pgvil_data.csv"
-        combined_df.to_csv(output_path, index=False)
-        print(f"Successfully processed all PGVIL data; results saved to: {output_path}")
-        return output_path
+        pgvil_root = processor.process_zip()
+        if not pgvil_root.exists():
+            raise RuntimeError("Failed to extract ZIP file")
+
+        # Run C++ engine for additional processing
+        if not run_pgvil_engine(pgvil_config):
+            raise RuntimeError("C++ engine execution failed")
+
+        merged_csv = processor.merge_csv_files()
+        if merged_csv is None or not merged_csv.exists():
+            raise RuntimeError("Failed to merge CSV files")
+        print(f"merged_csv: {merged_csv}")
+        return merged_csv
 
     except Exception as e:
-        print(f"处理PGVIL数据时出错: {e}")
-        import traceback
-        traceback.print_exc()
+        print(f"Error: Processing failed for {zip_data_path.name}: {str(e)}")
         return None
+
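
For reference, a minimal sketch of driving the new PGVIL pipeline end to end through process_pgvil_data; the archive path is a hypothetical placeholder, and the sketch assumes core is importable as a package with the _internal/engine and _internal/data_map resources in place:

from pathlib import Path

from core.optimized_processor import process_pgvil_data

# Hypothetical archive; on success this extracts the ZIP, runs the C++
# engine, and merges the intermediate CSV files into a single result.
merged_csv = process_pgvil_data(
    zip_data_path=Path("data/pgvil_run_001.zip"),
    output_base_dir=Path("output/pgvil_run_001"),
    utm_zone=51,
    x_offset=0.0,
    y_offset=0.0,
)
if merged_csv is None:
    print("PGVIL processing failed; see the error output above")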