|
@@ -567,7 +567,9 @@ class ZipCSVProcessor:
|
|
# df_vehicle.dropna(subset=[col for col in required_pos if col in df_vehicle.columns], inplace=True)
|
|
# df_vehicle.dropna(subset=[col for col in required_pos if col in df_vehicle.columns], inplace=True)
|
|
|
|
|
|
try:
|
|
try:
|
|
- df_vehicle["simTime"] = np.round(np.linspace(df_raw["simTime"].tolist()[0], df_raw["simTime"].tolist()[0] + 0.01*(len(df_vehicle)), len(df_vehicle)), 2)
|
|
|
|
|
|
+ # x = np.arange(df_raw["simTime"].tolist()[0], df_raw["simTime"].tolist()[0] + 0.01 * (len(df_vehicle)),0.01)
|
|
|
|
+ # y = f"{x: .02f}"
|
|
|
|
+ # NOTE(review): 28800 s = 8 h — presumably a UTC+8 (local-time) offset applied to simTime; confirm intent.
+ df_vehicle["simTime"] = np.round(np.linspace(df_raw["simTime"].tolist()[0]+28800, df_raw["simTime"].tolist()[0]+28800 + 0.01*(len(df_vehicle)), len(df_vehicle)), 2)
|
|
df_vehicle["simFrame"] = np.arange(1, len(df_vehicle) + 1)
|
|
df_vehicle["simFrame"] = np.arange(1, len(df_vehicle) + 1)
|
|
df_vehicle["playerId"] = int(player_id)
|
|
df_vehicle["playerId"] = int(player_id)
|
|
df_vehicle['playerId'] = pd.to_numeric(df_vehicle['playerId']).astype(int)
|
|
df_vehicle['playerId'] = pd.to_numeric(df_vehicle['playerId']).astype(int)
|
|
@@ -1019,6 +1021,11 @@ class FinalDataProcessor:
|
|
self.config = config
|
|
self.config = config
|
|
self.output_dir = config.output_dir
|
|
self.output_dir = config.output_dir
|
|
|
|
|
|
|
|
+ def find_closet_idx(self, time, df) -> int:
+ # Return the index label of the df row whose simTime is nearest to `time`.
+ # (Removed two leftover assignments that computed this same expression and
+ # discarded the result; the misspelled name is kept because later hunks in
+ # this patch call self.find_closet_idx.)
+ return (df['simTime'] - time).abs().idxmin()
+
|
|
def process(self) -> bool:
|
|
def process(self) -> bool:
|
|
"""执行最终数据合并和处理步骤。"""
|
|
"""执行最终数据合并和处理步骤。"""
|
|
print("--- Starting Final Data Processing ---")
|
|
print("--- Starting Final Data Processing ---")
|
|
@@ -1074,14 +1081,23 @@ class FinalDataProcessor:
|
|
df_merged.sort_values(['simTime', 'playerId'], inplace=True)
|
|
df_merged.sort_values(['simTime', 'playerId'], inplace=True)
|
|
|
|
|
|
# 使用 merge_asof 进行就近合并,不包括 simFrame
|
|
# 使用 merge_asof 进行就近合并,不包括 simFrame
|
|
- df_merged = pd.merge_asof(
|
|
|
|
|
|
+ # df_merged = pd.merge_asof(
|
|
|
|
+ # df_merged,
|
|
|
|
+ # df_ego,
|
|
|
|
+ # on='simTime',
|
|
|
|
+ # by='playerId',
|
|
|
|
+ # direction='nearest',
|
|
|
|
+ # tolerance=0.01 # 10ms tolerance
|
|
|
|
+ # )
|
|
|
|
+ df_merged = pd.merge(
|
|
df_merged,
|
|
df_merged,
|
|
df_ego,
|
|
df_ego,
|
|
- on='simTime',
|
|
|
|
- by='playerId',
|
|
|
|
- direction='nearest',
|
|
|
|
- tolerance=0.01 # 10ms tolerance
|
|
|
|
|
|
+ on=["simTime", "playerId"],
|
|
|
|
+ how="left",
|
|
|
|
+ suffixes=("", "_map"),
|
|
)
|
|
)
|
|
|
|
+ if {"posX_map", "posY_map", "posH_map"}.issubset(df_merged.columns):
|
|
|
|
+ df_merged.drop(columns=["posX_map", "posY_map", "posH_map"], inplace=True)
|
|
print("EgoMap data merged.")
|
|
print("EgoMap data merged.")
|
|
except Exception as e:
|
|
except Exception as e:
|
|
print(f"Warning: Could not merge EgoMap data from {egomap_path}: {e}")
|
|
print(f"Warning: Could not merge EgoMap data from {egomap_path}: {e}")
|
|
@@ -1097,14 +1113,14 @@ class FinalDataProcessor:
|
|
|
|
|
|
if 'simTime' in df_function.columns:
|
|
if 'simTime' in df_function.columns:
|
|
df_function['simTime'] = df_function['simTime'].round(2)
|
|
df_function['simTime'] = df_function['simTime'].round(2)
|
|
- df_function['time'] = df_function['simTime'].round(1).astype(float)
|
|
|
|
- df_merged['time'] = df_merged['simTime'].round(1).astype(float)
|
|
|
|
-
|
|
|
|
- common_cols = list(set(df_merged.columns) & set(df_function.columns) - {'time'})
|
|
|
|
|
|
+ df_function['time1'] = df_function['simTime'].apply(lambda x: self.find_closet_idx(x, df_merged))
|
|
|
|
+ common_cols = list(set(df_merged.columns) & set(df_function.columns))
|
|
df_function.drop(columns=common_cols, inplace=True, errors='ignore')
|
|
df_function.drop(columns=common_cols, inplace=True, errors='ignore')
|
|
|
|
+ df_merged = df_merged.merge(df_function, right_on='time1', left_index=True, how='left')
|
|
|
|
+
|
|
|
|
|
|
- df_merged = pd.merge(df_merged, df_function, on=["time"], how="left")
|
|
|
|
- df_merged.drop(columns=['time'], inplace=True)
|
|
|
|
|
|
+ df_merged.drop(columns=['time1'], inplace=True)
|
|
|
|
+ df_merged.reset_index(drop=True, inplace=True)
|
|
print("Function data merged.")
|
|
print("Function data merged.")
|
|
else:
|
|
else:
|
|
print("Warning: 'simTime' column not found in Function.csv. Cannot merge.")
|
|
print("Warning: 'simTime' column not found in Function.csv. Cannot merge.")
|
|
@@ -1122,20 +1138,18 @@ class FinalDataProcessor:
|
|
if 'simFrame' in df_obu.columns:
|
|
if 'simFrame' in df_obu.columns:
|
|
df_obu = df_obu.drop(columns=['simFrame'])
|
|
df_obu = df_obu.drop(columns=['simFrame'])
|
|
|
|
|
|
- df_obu['time'] = df_obu['simTime'].round(1).astype(float)
|
|
|
|
- df_merged['time'] = df_merged['simTime'].round(1).astype(float)
|
|
|
|
-
|
|
|
|
- common_cols = list(set(df_merged.columns) & set(df_obu.columns) - {'time'})
|
|
|
|
|
|
+ df_obu['time2'] = df_obu['simTime'].apply(lambda x: self.find_closet_idx(x, df_merged))
|
|
|
|
+ common_cols = list(set(df_merged.columns) & set(df_obu.columns))
|
|
df_obu.drop(columns=common_cols, inplace=True, errors='ignore')
|
|
df_obu.drop(columns=common_cols, inplace=True, errors='ignore')
|
|
|
|
+ df_merged = df_merged.merge(df_obu, right_on = 'time2', left_index=True, how = 'left')
|
|
|
|
|
|
- df_merged = pd.merge(df_merged, df_obu, on=["time"], how="left")
|
|
|
|
- df_merged.drop(columns=['time'], inplace=True)
|
|
|
|
|
|
+ df_merged.drop(columns=['time2'], inplace=True)
|
|
print("OBU data merged.")
|
|
print("OBU data merged.")
|
|
except Exception as e:
|
|
except Exception as e:
|
|
print(f"Warning: Could not merge OBU data from {obu_path}: {e}")
|
|
print(f"Warning: Could not merge OBU data from {obu_path}: {e}")
|
|
else:
|
|
else:
|
|
print("OBU data not found or empty, skipping merge.")
|
|
print("OBU data not found or empty, skipping merge.")
|
|
-
|
|
|
|
|
|
+ # NOTE(review): unconditionally flips the sign of speedH/accelX/accelY —
+ # presumably a coordinate-convention change; verify against downstream consumers of df_merged.
+ df_merged[['speedH', 'accelX', 'accelY']] = -df_merged[['speedH', 'accelX', 'accelY']]
|
|
return df_merged
|
|
return df_merged
|
|
|
|
|
|
def _process_trafficlight_data(self) -> pd.DataFrame:
|
|
def _process_trafficlight_data(self) -> pd.DataFrame:
|