# comfort.py
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. import math
  4. import pandas as pd
  5. import numpy as np
  6. import scipy.signal
  7. from modules.lib.score import Score
  8. from modules.lib.common import get_interpolation, get_frame_with_time
  9. from modules.config import config
  10. from modules.lib.log_manager import LogManager
def peak_valley_decorator(method):
    """Peak/valley detection decorator.

    Wraps a per-swing handler: detects peak/valley indices of the ego yaw
    rate (``speedH``), walks adjacent extrema pairs and invokes ``method``
    with ``(p_curr, p_last, True)`` for every pair accepted by
    ``_peak_valley_judgment``.  When no extrema exist at all, ``method`` is
    invoked once with dummy points and flag=False so handlers can reset
    their state.
    """
    def wrapper(self, *args, **kwargs):
        peak_valley = self._peak_valley_determination(self.df)
        # [simTime, speedH] pairs at the detected extrema.
        pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
        if pv_list:
            p_last = pv_list[0]
            for i in range(1, len(pv_list)):
                p_curr = pv_list[i]
                if self._peak_valley_judgment(p_last, p_curr):
                    method(self, p_curr, p_last, True, *args, **kwargs)
                else:
                    # The anchor only advances on non-qualifying pairs; a
                    # qualifying swing keeps comparing against the same anchor.
                    p_last = p_curr
            # NOTE(review): wrapper returns the raw (undecorated) method
            # object; all call sites ignore the return value.
            return method
        else:
            method(self, [0, 0], [0, 0], False, *args, **kwargs)
            return method
    return wrapper
  29. class Comfort(object):
  30. """自动驾驶舒适性评估类"""
  31. def __init__(self, data_processed):
  32. self.data_processed = data_processed
  33. self.logger = LogManager().get_logger()
  34. # 初始化数据容器
  35. self.data = data_processed.ego_data
  36. self.ego_df = pd.DataFrame()
  37. self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
  38. # 统计指标
  39. self.calculated_value = {
  40. 'weaving': 0, 'shake': 0, 'cadence': 0,
  41. 'slamBrake': 0, 'slamAccelerate': 0
  42. }
  43. # 时间序列数据
  44. self.time_list = self.data['simTime'].values.tolist()
  45. self.frame_list = self.data['simFrame'].values.tolist()
  46. # 初始化检测器状态
  47. self.zigzag_count = 0
  48. self.shake_count = 0
  49. self.cadence_count = 0
  50. self.slam_brake_count = 0
  51. self.slam_accel_count = 0
  52. self.zigzag_time_list = []
  53. # 数据预处理
  54. self._get_data()
  55. self._comf_param_cal()
  56. def _get_data(self):
  57. """获取舒适性评估所需数据"""
  58. self.ego_df = self.data[config.COMFORT_INFO].copy()
  59. self.df = self.ego_df.reset_index(drop=True)
  60. # 1. 移除未使用的曲率计算相关代码
  61. def _comf_param_cal(self):
  62. """计算舒适性相关参数"""
  63. # 动态加减速阈值
  64. self.ego_df['ip_acc'] = self.ego_df['v'].apply(
  65. get_interpolation, point1=[18, 4], point2=[72, 2])
  66. self.ego_df['ip_dec'] = self.ego_df['v'].apply(
  67. get_interpolation, point1=[18, -5], point2=[72, -3.5])
  68. # 急刹急加速标记
  69. self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
  70. lambda x: 1 if x < 0 else 0)
  71. self.ego_df['slam_accel'] = (self.ego_df['lon_acc'] - self.ego_df['ip_acc']).apply(
  72. lambda x: 1 if x > 0 else 0)
  73. # 顿挫检测预处理
  74. self.ego_df['cadence'] = self.ego_df.apply(
  75. lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
  76. def _peak_valley_determination(self, df):
  77. """确定车辆角速度的峰值和谷值"""
  78. peaks, _ = scipy.signal.find_peaks(
  79. df['speedH'], height=2.3, distance=3,
  80. prominence=2.3, width=1)
  81. valleys, _ = scipy.signal.find_peaks(
  82. -df['speedH'], height=2.3, distance=3,
  83. prominence=2.3, width=1)
  84. return sorted(list(peaks) + list(valleys))
  85. def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=4.6):
  86. """判断峰谷对是否构成曲折行驶"""
  87. t_diff = p_curr[0] - p_last[0]
  88. v_diff = abs(p_curr[1] - p_last[1])
  89. s = p_curr[1] * p_last[1]
  90. if t_diff < tw and v_diff > avg and s < 0:
  91. if [p_last[0], p_curr[0]] not in self.zigzag_time_list:
  92. self.zigzag_time_list.append([p_last[0], p_curr[0]])
  93. return True
  94. return False
  95. @peak_valley_decorator
  96. def zigzag_count_func(self, p_curr, p_last, flag=True):
  97. """计算曲折行驶次数"""
  98. if flag:
  99. self.zigzag_count += 1
  100. @peak_valley_decorator
  101. def cal_zigzag_strength_strength(self, p_curr, p_last, flag=True):
  102. """计算曲折行驶强度"""
  103. if flag:
  104. v_diff = abs(p_curr[1] - p_last[1])
  105. t_diff = p_curr[0] - p_last[0]
  106. if t_diff > 0:
  107. self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
  108. else:
  109. self.zigzag_stre_list = []
  110. def _shake_detector(self, T_diff=0.5):
  111. """检测晃动事件 - 改进版本(不使用车辆轨迹曲率)"""
  112. # lat_acc已经是车辆坐标系下的横向加速度,由data_process.py计算
  113. time_list = []
  114. frame_list = []
  115. # 复制数据以避免修改原始数据
  116. df = self.ego_df.copy()
  117. # 1. 计算横向加速度变化率
  118. df['lat_acc_rate'] = df['lat_acc'].diff() / df['simTime'].diff()
  119. # 2. 计算横摆角速度变化率
  120. df['speedH_rate'] = df['speedH'].diff() / df['simTime'].diff()
  121. # 3. 计算横摆角速度的短期变化特性
  122. window_size = 5 # 5帧窗口
  123. df['speedH_std'] = df['speedH'].rolling(window=window_size, min_periods=2).std()
  124. # 4. 基于车速的动态阈值
  125. # df['lat_acc_threshold'] = df['v'].apply(
  126. # lambda speed: max(0.3, min(0.8, 0.5 * (1 + (speed - 20) / 60)))
  127. # )
  128. v0 = 20 * 5/18 # ≈5.56 m/s
  129. # 递减系数
  130. k = 0.008 * 3.6 # =0.0288 per m/s
  131. df['lat_acc_threshold'] = df['v'].apply(
  132. lambda speed: max(
  133. 1.0, # 下限 1.0 m/s²
  134. min(
  135. 1.8, # 上限 1.8 m/s²
  136. 1.8 - k * (speed - v0) # 线性递减
  137. )
  138. )
  139. )
  140. df['speedH_threshold'] = df['v'].apply(
  141. lambda speed: max(1.5, min(3.0, 2.0 * (1 + (speed - 20) / 60)))
  142. )
  143. # 将计算好的阈值和中间变量保存到self.ego_df中,供其他函数使用
  144. self.ego_df['lat_acc_threshold'] = df['lat_acc_threshold']
  145. self.ego_df['speedH_threshold'] = df['speedH_threshold']
  146. self.ego_df['lat_acc_rate'] = df['lat_acc_rate']
  147. self.ego_df['speedH_rate'] = df['speedH_rate']
  148. self.ego_df['speedH_std'] = df['speedH_std']
  149. # 5. 综合判断晃动条件
  150. # 条件A: 横向加速度超过阈值
  151. condition_A = df['lat_acc'].abs() > df['lat_acc_threshold']
  152. # 条件B: 横向加速度变化率超过阈值
  153. lat_acc_rate_threshold = 0.5 # 横向加速度变化率阈值 (m/s³)
  154. condition_B = df['lat_acc_rate'].abs() > lat_acc_rate_threshold
  155. # 条件C: 横摆角速度有明显变化但不呈现周期性
  156. condition_C = (df['speedH_std'] > df['speedH_threshold']) & (~df['simTime'].isin(self._get_zigzag_times()))
  157. # 综合条件: 满足条件A,且满足条件B或条件C
  158. shake_condition = condition_A & (condition_B | condition_C)
  159. # 筛选满足条件的数据
  160. shake_df = df[shake_condition].copy()
  161. # 修改:按照连续帧号分组,确保只有连续帧超过阈值的才被认为是晃动
  162. if not shake_df.empty:
  163. # 计算帧号差
  164. shake_df['frame_diff'] = shake_df['simFrame'].diff()
  165. # 标记不连续的点(帧号差大于1)
  166. # 通常连续帧的帧号差应该是1
  167. shake_df['is_new_group'] = shake_df['frame_diff'] > 1
  168. # 第一个点标记为新组
  169. if not shake_df.empty:
  170. shake_df.iloc[0, shake_df.columns.get_loc('is_new_group')] = True
  171. # 创建组ID
  172. shake_df['group_id'] = shake_df['is_new_group'].cumsum()
  173. # 按组计算帧数和持续时间
  174. group_info = shake_df.groupby('group_id').agg({
  175. 'simTime': ['min', 'max'],
  176. 'simFrame': ['min', 'max', 'count'] # 添加count计算每组的帧数
  177. })
  178. group_info.columns = ['start_time', 'end_time', 'start_frame', 'end_frame', 'frame_count']
  179. group_info['duration'] = group_info['end_time'] - group_info['start_time']
  180. # 筛选连续帧数超过阈值的组
  181. # 假设采样率为100Hz,则0.5秒对应约50帧
  182. MIN_FRAME_COUNT = 5 # 最小连续帧数阈值,可根据实际采样率调整
  183. valid_groups = group_info[group_info['frame_count'] >= MIN_FRAME_COUNT]
  184. # 如果有有效的晃动组
  185. if not valid_groups.empty:
  186. # 获取有效组的ID
  187. valid_group_ids = valid_groups.index.tolist()
  188. # 筛选属于有效组的数据点
  189. valid_shake_df = shake_df[shake_df['group_id'].isin(valid_group_ids)]
  190. # 简化场景分类,只收集时间和帧号
  191. for group_id, group in valid_shake_df.groupby('group_id'):
  192. # 不再使用curvHor进行场景分类,而是使用横摆角速度和转向灯状态
  193. # 直道场景(横摆角速度小,无转向灯)
  194. straight_mask = (group.lightMask == 0) & (group.speedH.abs() < 2.0)
  195. # 换道场景(有转向灯,横摆角速度适中)
  196. lane_change_mask = (group.lightMask != 0) & (group.speedH.abs() < 5.0)
  197. # 转弯场景(横摆角速度大或有转向灯且横摆角速度适中)
  198. turning_mask = (group.speedH.abs() >= 5.0) | ((group.lightMask != 0) & (group.speedH.abs() >= 2.0))
  199. # 为每种场景添加标记
  200. if straight_mask.any():
  201. straight_group = group[straight_mask].copy()
  202. time_list.extend(straight_group['simTime'].values)
  203. frame_list.extend(straight_group['simFrame'].values)
  204. if lane_change_mask.any():
  205. lane_change_group = group[lane_change_mask].copy()
  206. time__list.extend(lane_change_group['simTime'].values)
  207. frame_list.extend(lane_change_group['simFrame'].values)
  208. if turning_mask.any():
  209. turning_group = group[turning_mask].copy()
  210. time_list.extend(turning_group['simTime'].values)
  211. frame_list.extend(turning_group['simFrame'].values)
  212. # 准备晃动事件数据
  213. shake_time = []
  214. shake_frame = []
  215. for group_id in valid_group_ids:
  216. start_time = valid_groups.loc[group_id, 'start_time']
  217. end_time = valid_groups.loc[group_id, 'end_time']
  218. start_frame = valid_groups.loc[group_id, 'start_frame']
  219. end_frame = valid_groups.loc[group_id, 'end_frame']
  220. shake_time.append([start_time, end_time])
  221. shake_frame.append([start_frame, end_frame])
  222. self.shake_count = len(shake_time)
  223. if shake_time:
  224. # 保存晃动事件摘要
  225. time_df = pd.DataFrame(shake_time, columns=['start_time', 'end_time'])
  226. time_df['duration'] = time_df['end_time'] - time_df['start_time'] # 添加持续时间列
  227. frame_df = pd.DataFrame(shake_frame, columns=['start_frame', 'end_frame'])
  228. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  229. discomfort_df['type'] = 'shake'
  230. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  231. # 在方法末尾添加以下代码,保存晃动事件的详细数据
  232. # if self.shake_count > 0:
  233. # self._save_shake_data()
  234. # self._plot_shake_analysis()
  235. return time_list
    def _save_shake_data(self):
        """Persist per-event and summary data for detected shake events.

        Writes, under <data_path>/comfort_analysis:
          - shake_events_summary.csv: one row per shake event
          - shake_event_<i>_detail.csv: raw frames of each event
          - shake_statistics.json: aggregate duration statistics
        """
        import os
        # Create the output directory.
        save_dir = os.path.join(self.data_processed.data_path, "comfort_analysis")
        os.makedirs(save_dir, exist_ok=True)
        # 1. Summary of all shake events.
        shake_events = self.discomfort_df[self.discomfort_df['type'] == 'shake'].copy()
        if not shake_events.empty:
            shake_events.to_csv(os.path.join(save_dir, "shake_events_summary.csv"), index=False)
        # 2. Detailed frame data per event.
        for i, event in shake_events.iterrows():
            start_time = event['start_time']
            end_time = event['end_time']
            # All ego samples belonging to this event's time window.
            event_data = self.ego_df[
                (self.ego_df['simTime'] >= start_time) &
                (self.ego_df['simTime'] <= end_time)
            ].copy()
            # Derived analysis columns.
            event_data['lat_acc_abs'] = event_data['lat_acc'].abs()
            event_data['lat_acc_rate'] = event_data['lat_acc'].diff() / event_data['simTime'].diff()
            event_data['speedH_rate'] = event_data['speedH'].diff() / event_data['simTime'].diff()
            # NOTE(review): i is the DataFrame index label, not a sequential
            # counter — file numbering can be non-contiguous.
            event_data.to_csv(
                os.path.join(save_dir, f"shake_event_{i+1}_detail.csv"),
                index=False
            )
        # 3. Aggregate statistics over all shake events (NaN when none).
        shake_stats = {
            'total_count': self.shake_count,
            'avg_duration': shake_events['duration'].mean(),
            'max_duration': shake_events['duration'].max(),
            'min_duration': shake_events['duration'].min(),
            'total_duration': shake_events['duration'].sum(),
        }
        import json
        with open(os.path.join(save_dir, "shake_statistics.json"), 'w') as f:
            json.dump(shake_stats, f, indent=4)
        self.logger.info(f"晃动事件数据已保存至: {save_dir}")
    def _plot_shake_analysis(self):
        """Plot shake-analysis charts, marking thresholds and key data points.

        Produces a three-panel figure (lateral acceleration, yaw rate,
        vehicle speed) with detected shake events shaded and their extreme
        points annotated; saved as shake_analysis.png.
        """
        import os
        import matplotlib.pyplot as plt
        import numpy as np
        import pandas as pd
        # Create the output directory.
        save_dir = os.path.join(self.data_processed.data_path, "comfort_analysis")
        os.makedirs(save_dir, exist_ok=True)
        # Prepare plotting data.
        df = self.ego_df.copy()
        # Columns normally produced by _shake_detector.
        required_columns = ['lat_acc_threshold', 'speedH_threshold', 'speedH_std']
        missing_columns = [col for col in required_columns if col not in df.columns]
        if missing_columns:
            self.logger.warning(f"Missing columns for plotting: {missing_columns}, possibly because shake detection was not executed correctly")
            # Recompute any missing columns.
            # NOTE(review): this fallback lat_acc_threshold band (0.3..0.8)
            # differs from the 1.0..1.8 band used in _shake_detector —
            # confirm which is intended.
            for col in missing_columns:
                if col == 'lat_acc_threshold':
                    df['lat_acc_threshold'] = df['v'].apply(
                        lambda speed: max(0.3, min(0.8, 0.5 * (1 + (speed - 20) / 60)))
                    )
                elif col == 'speedH_threshold':
                    df['speedH_threshold'] = df['v'].apply(
                        lambda speed: max(1.5, min(3.0, 2.0 * (1 + (speed - 20) / 60)))
                    )
                elif col == 'speedH_std':
                    window_size = 5
                    df['speedH_std'] = df['speedH'].rolling(window=window_size, min_periods=2).std()
        # Three stacked panels sharing the time axis.
        fig, axs = plt.subplots(3, 1, figsize=(14, 12), sharex=True)
        # Panel 1: lateral acceleration.
        axs[0].plot(df['simTime'], df['lat_acc'], 'b-', label='Lateral Acceleration')
        axs[0].set_ylabel('Lateral Acceleration (m/s²)')
        axs[0].set_title('Shake Analysis')
        axs[0].grid(True)
        # Symmetric dynamic threshold band.
        axs[0].plot(df['simTime'], df['lat_acc_threshold'], 'r--', label='Threshold')
        axs[0].plot(df['simTime'], -df['lat_acc_threshold'], 'r--')
        # Panel 2: yaw rate.
        axs[1].plot(df['simTime'], df['speedH'], 'g-', label='Yaw Rate')
        axs[1].set_ylabel('Yaw Rate (deg/s)')
        axs[1].grid(True)
        # Yaw-rate threshold band.
        axs[1].plot(df['simTime'], df['speedH_threshold'], 'r--', label='Threshold')
        axs[1].plot(df['simTime'], -df['speedH_threshold'], 'r--')
        # Short-term yaw-rate standard deviation.
        axs[1].plot(df['simTime'], df['speedH_std'], 'm-', alpha=0.5, label='Yaw Rate Std')
        # Panel 3: vehicle speed.
        axs[2].plot(df['simTime'], df['v'], 'k-', label='Vehicle Speed')
        axs[2].set_xlabel('Time (s)')
        axs[2].set_ylabel('Speed (km/h)')
        axs[2].grid(True)
        # Mark each detected shake event.
        if not self.discomfort_df.empty:
            shake_df = self.discomfort_df[self.discomfort_df['type'] == 'shake']
            # Detailed markers for every shake event.
            for idx, row in shake_df.iterrows():
                start_time = row['start_time']
                end_time = row['end_time']
                # Shade the event window in every panel.
                for ax in axs:
                    ax.axvspan(start_time, end_time, alpha=0.2, color='red')
                # Samples within the event window.
                shake_period = df[(df['simTime'] >= start_time) & (df['simTime'] <= end_time)]
                if not shake_period.empty:
                    # Peak |lateral acceleration| inside the event.
                    max_lat_acc_idx = shake_period['lat_acc'].abs().idxmax()
                    max_lat_acc_time = shake_period.loc[max_lat_acc_idx, 'simTime']
                    max_lat_acc_value = shake_period.loc[max_lat_acc_idx, 'lat_acc']
                    # Annotate the maximum lateral-acceleration point.
                    axs[0].scatter(max_lat_acc_time, max_lat_acc_value, color='red', s=80, zorder=5)
                    axs[0].annotate(
                        f'Max: {max_lat_acc_value:.2f} m/s²\nTime: {max_lat_acc_time:.2f}s',
                        xy=(max_lat_acc_time, max_lat_acc_value),
                        xytext=(10, 20),
                        textcoords='offset points',
                        arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=.2'),
                        bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.7)
                    )
                    # Peak |yaw rate| inside the event.
                    max_speedH_idx = shake_period['speedH'].abs().idxmax()
                    max_speedH_time = shake_period.loc[max_speedH_idx, 'simTime']
                    max_speedH_value = shake_period.loc[max_speedH_idx, 'speedH']
                    # Annotate the maximum yaw-rate point.
                    axs[1].scatter(max_speedH_time, max_speedH_value, color='red', s=80, zorder=5)
                    axs[1].annotate(
                        f'Max: {max_speedH_value:.2f} deg/s\nTime: {max_speedH_time:.2f}s',
                        xy=(max_speedH_time, max_speedH_value),
                        xytext=(10, 20),
                        textcoords='offset points',
                        arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=.2'),
                        bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.7)
                    )
                    # Mark event start/end in the first two panels only.
                    for i in range(2):
                        # Start point.
                        start_value = shake_period.iloc[0][['lat_acc', 'speedH'][i]]
                        axs[i].scatter(start_time, start_value, color='green', s=80, zorder=5)
                        axs[i].annotate(
                            f'Start: {start_time:.2f}s',
                            xy=(start_time, start_value),
                            xytext=(-10, -30),
                            textcoords='offset points',
                            arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=.2'),
                            bbox=dict(boxstyle='round,pad=0.5', fc='lightgreen', alpha=0.7)
                        )
                        # End point.
                        end_value = shake_period.iloc[-1][['lat_acc', 'speedH'][i]]
                        axs[i].scatter(end_time, end_value, color='blue', s=80, zorder=5)
                        axs[i].annotate(
                            f'End: {end_time:.2f}s',
                            xy=(end_time, end_value),
                            xytext=(10, -30),
                            textcoords='offset points',
                            arrowprops=dict(arrowstyle='->', connectionstyle='arc3,rad=.2'),
                            bbox=dict(boxstyle='round,pad=0.5', fc='lightblue', alpha=0.7)
                        )
        # Explain the detection conditions on the figure.
        textstr = '\n'.join((
            'Shake Detection Conditions:',
            '1. Lateral acceleration exceeds dynamic threshold',
            '2. High lateral acceleration rate or yaw rate std',
            '3. Duration exceeds threshold'
        ))
        props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
        axs[0].text(0.02, 0.98, textstr, transform=axs[0].transAxes, fontsize=10,
                    verticalalignment='top', bbox=props)
        # Legends.
        for ax in axs:
            ax.legend(loc='upper right')
        # Finalize and save.
        plt.tight_layout()
        plt.savefig(os.path.join(save_dir, "shake_analysis.png"), dpi=300)
        plt.close()
  411. def _get_zigzag_times(self):
  412. """获取所有画龙事件的时间点,用于排除画龙与晃动的重叠检测"""
  413. zigzag_times = []
  414. for start_time, end_time in self.zigzag_time_list:
  415. # 获取该时间段内的所有时间点
  416. times_in_range = self.ego_df[(self.ego_df['simTime'] >= start_time) &
  417. (self.ego_df['simTime'] <= end_time)]['simTime'].values
  418. zigzag_times.extend(times_in_range)
  419. return zigzag_times
  420. def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
  421. """处理顿挫数据"""
  422. if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
  423. return np.nan
  424. elif abs(lon_acc) == 0:
  425. return 0
  426. elif lon_acc > 0 and lon_acc < ip_acc:
  427. return 1
  428. elif lon_acc < 0 and lon_acc > ip_dec:
  429. return -1
  430. else:
  431. return 0
  432. def _cadence_detector(self):
  433. """检测顿挫事件"""
  434. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence', 'v']].copy()
  435. time_list = data['simTime'].values.tolist()
  436. data = data[data['cadence'] != np.nan]
  437. data['cadence_diff'] = data['cadence'].diff()
  438. data.dropna(subset='cadence_diff', inplace=True)
  439. data = data[data['cadence_diff'] != 0]
  440. t_list = data['simTime'].values.tolist()
  441. f_list = data['simFrame'].values.tolist()
  442. TIME_RANGE = 1
  443. group_time = []
  444. group_frame = []
  445. sub_group_time = []
  446. sub_group_frame = []
  447. for i in range(len(f_list)):
  448. if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE: # 特征点相邻一秒内的,算作同一组顿挫
  449. sub_group_time.append(t_list[i])
  450. sub_group_frame.append(f_list[i])
  451. else:
  452. group_time.append(sub_group_time)
  453. group_frame.append(sub_group_frame)
  454. sub_group_time = [t_list[i]]
  455. sub_group_frame = [f_list[i]]
  456. group_time.append(sub_group_time)
  457. group_frame.append(sub_group_frame)
  458. group_time = [g for g in group_time if len(g) >= 1] # 有一次特征点则算作一次顿挫
  459. group_frame = [g for g in group_frame if len(g) >= 1]
  460. # 输出图表值
  461. cadence_time = [[g[0], g[-1]] for g in group_time]
  462. cadence_frame = [[g[0], g[-1]] for g in group_frame]
  463. if cadence_time:
  464. # 保存顿挫事件摘要
  465. time_df = pd.DataFrame(cadence_time, columns=['start_time', 'end_time'])
  466. frame_df = pd.DataFrame(cadence_frame, columns=['start_frame', 'end_frame'])
  467. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  468. discomfort_df['type'] = 'cadence'
  469. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  470. # 将顿挫组的起始时间为组重新统计时间
  471. cadence_time_list = [time for pair in cadence_time for time in self.ego_df['simTime'].values if pair[0] <= time <= pair[1]]
  472. self.cadence_count = len(cadence_time)
  473. return cadence_time_list
  474. def _slam_brake_detector(self):
  475. """检测急刹车事件"""
  476. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake', 'v']].copy()
  477. res_df = data[data['slam_brake'] == 1]
  478. t_list = res_df['simTime'].values
  479. f_list = res_df['simFrame'].values.tolist()
  480. TIME_RANGE = 1
  481. group_time = []
  482. group_frame = []
  483. sub_group_time = []
  484. sub_group_frame = []
  485. for i in range(len(f_list)):
  486. if not sub_group_time or f_list[i] - f_list[i - 1] <= TIME_RANGE: # 连续帧的算作同一组急刹
  487. sub_group_time.append(t_list[i])
  488. sub_group_frame.append(f_list[i])
  489. else:
  490. group_time.append(sub_group_time)
  491. group_frame.append(sub_group_frame)
  492. sub_group_time = [t_list[i]]
  493. sub_group_frame = [f_list[i]]
  494. group_time.append(sub_group_time)
  495. group_frame.append(sub_group_frame)
  496. group_time = [g for g in group_time if len(g) >= 2] # 达到两帧算作一次急刹
  497. group_frame = [g for g in group_frame if len(g) >= 2]
  498. # 输出图表值
  499. slam_brake_time = [[g[0], g[-1]] for g in group_time]
  500. slam_brake_frame = [[g[0], g[-1]] for g in group_frame]
  501. if slam_brake_time:
  502. # 保存事件摘要
  503. time_df = pd.DataFrame(slam_brake_time, columns=['start_time', 'end_time'])
  504. frame_df = pd.DataFrame(slam_brake_frame, columns=['start_frame', 'end_frame'])
  505. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  506. discomfort_df['type'] = 'slam_brake'
  507. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  508. time_list = [element for sublist in group_time for element in sublist]
  509. self.slam_brake_count = len(group_time)
  510. return time_list
  511. def _slam_accel_detector(self):
  512. """检测急加速事件"""
  513. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel', 'v']].copy()
  514. res_df = data.loc[data['slam_accel'] == 1]
  515. t_list = res_df['simTime'].values
  516. f_list = res_df['simFrame'].values.tolist()
  517. group_time = []
  518. group_frame = []
  519. sub_group_time = []
  520. sub_group_frame = []
  521. for i in range(len(f_list)):
  522. if not group_time or f_list[i] - f_list[i - 1] <= 1: # 连续帧的算作同一组急加速
  523. sub_group_time.append(t_list[i])
  524. sub_group_frame.append(f_list[i])
  525. else:
  526. group_time.append(sub_group_time)
  527. group_frame.append(sub_group_frame)
  528. sub_group_time = [t_list[i]]
  529. sub_group_frame = [f_list[i]]
  530. group_time.append(sub_group_time)
  531. group_frame.append(sub_group_frame)
  532. group_time = [g for g in group_time if len(g) >= 2]
  533. group_frame = [g for g in group_frame if len(g) >= 2]
  534. # 输出图表值
  535. slam_accel_time = [[g[0], g[-1]] for g in group_time]
  536. slam_accel_frame = [[g[0], g[-1]] for g in group_frame]
  537. if slam_accel_time:
  538. # 保存事件摘要
  539. time_df = pd.DataFrame(slam_accel_time, columns=['start_time', 'end_time'])
  540. frame_df = pd.DataFrame(slam_accel_frame, columns=['start_frame', 'end_frame'])
  541. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  542. discomfort_df['type'] = 'slam_accel'
  543. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  544. time_list = [element for sublist in group_time for element in sublist]
  545. self.slam_accel_count = len(group_time)
  546. return time_list
    def comf_statistic(self):
        """Run every comfort detector and aggregate the event counts.

        Ordering matters: zigzag (weaving) detection runs first because it
        fills self.zigzag_time_list, which shake detection uses to exclude
        overlapping spans.  Returns the dict of per-metric event counts.
        """
        df = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'accelH', 'speedH', 'lat_acc', 'v']].copy()
        self.zigzag_count_func()
        # self.cal_zigzag_strength_strength()
        if self.zigzag_time_list:
            # Save the weaving (zigzag) event summary.
            zigzag_df = pd.DataFrame(self.zigzag_time_list, columns=['start_time', 'end_time'])
            zigzag_df = get_frame_with_time(zigzag_df, self.ego_df)
            zigzag_df['type'] = 'zigzag'
            self.discomfort_df = pd.concat([self.discomfort_df, zigzag_df], ignore_index=True)
        zigzag_t_list = []
        # Expand each [t_start, t_end] pair into the full run of sample times.
        t_list = df['simTime'].values.tolist()
        for t_start, t_end in self.zigzag_time_list:
            index_1 = t_list.index(t_start)
            index_2 = t_list.index(t_end)
            zigzag_t_list.extend(t_list[index_1:index_2 + 1])
        zigzag_t_list = list(set(zigzag_t_list))
        shake_t_list = self._shake_detector()
        cadence_t_list = self._cadence_detector()
        slam_brake_t_list = self._slam_brake_detector()
        slam_accel_t_list = self._slam_accel_detector()
        # Aggregate counts for reporting.
        self.calculated_value = {
            "weaving": self.zigzag_count,
            "shake": self.shake_count,
            "cadence": self.cadence_count,
            "slamBrake": self.slam_brake_count,
            "slamAccelerate": self.slam_accel_count
        }
        self.logger.info(f"舒适性计算完成,统计结果:{self.calculated_value}")
        return self.calculated_value
  580. def report_statistic(self):
  581. """生成舒适性评估报告"""
  582. comfort_result = self.comf_statistic()
  583. evaluator = Score(self.data_processed.comfort_config)
  584. result = evaluator.evaluate(comfort_result)
  585. print("\n[舒适性表现及得分情况]")
  586. return result