comfort.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
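"""Comfort metrics for autonomous-driving evaluation.

Detects weaving (zigzag), shake, cadence, slam-brake and slam-accelerate
events from processed ego-vehicle data, records each discomfort interval in
``discomfort_df``, and hands the aggregated counts to ``Score`` for scoring.
"""
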
import math
import pandas as pd
import numpy as np
import scipy.signal
from modules.lib.score import Score
from modules.lib.common import get_interpolation, get_frame_with_time
from modules.config import config
from modules.lib.log_manager import LogManager
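
# `peak_valley_decorator` factors out the peak/valley scan shared by the two
# weaving methods below: it locates the yaw-rate extrema once, then calls the
# wrapped method for every consecutive extremum pair that passes
# `_peak_valley_judgment` (flag=True), or once with dummy points (flag=False)
# when no extrema are found.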
def peak_valley_decorator(method):
    def wrapper(self, *args, **kwargs):
        peak_valley = self._peak_valley_determination(self.df)
        pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
        if len(pv_list) != 0:
            flag = True
            p_last = pv_list[0]
            for i in range(1, len(pv_list)):
                p_curr = pv_list[i]
                if self._peak_valley_judgment(p_last, p_curr):
                    method(self, p_curr, p_last, flag, *args, **kwargs)
                else:
                    p_last = p_curr
            return method
        else:
            flag = False
            p_curr = [0, 0]
            p_last = [0, 0]
            method(self, p_curr, p_last, flag, *args, **kwargs)
            return method
    return wrapper


class Comfort(object):
    """
    Compute comfort metrics for autonomous driving.

    Attributes:
        data: Vehicle driving data, stored as a pandas DataFrame.
    """

    def __init__(self, data_processed):
        self.data_processed = data_processed
        self.logger = LogManager().get_logger()

        self.data = data_processed.ego_data.copy()
        self.ego_df = pd.DataFrame()
        self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])

        self.calculated_value = {
            'weaving': 0,
            'shake': 0,
            'cadence': 0,
            'slamBrake': 0,
            'slamAccelerate': 0,
        }

        self.time_list = self.data['simTime'].values.tolist()
        self.frame_list = self.data['simFrame'].values.tolist()

        # (unused dictionaries removed)
        self.zigzag_count = 0
        self.shake_count = 0
        self.cadence_count = 0
        self.slam_brake_count = 0
        self.slam_accel_count = 0

        self.zigzag_time_list = []
        self.zigzag_stre_list = []
        self.cur_ego_path_list = []
        self.curvature_list = []

        self._get_data()
        self._comf_param_cal()

    def _get_data(self):
        """Collect the data required for the comfort evaluation."""
        self.ego_df = self.data[config.COMFORT_INFO].copy()
        self.df = self.ego_df.reset_index(drop=True)

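    # `_cal_cur_ego_path` evaluates the standard signed curvature of a planar
    # trajectory, kappa = (x'*y'' - y'*x'') / (x'^2 + y'^2)^(3/2), using
    # speedX/speedY as the first derivatives and accelX/accelY as the second.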
    def _cal_cur_ego_path(self, row):
        """Compute the curvature of the ego vehicle's trajectory at one sample."""
        try:
            # Treat near-zero speed separately to avoid dividing by ~0
            speed_sq = row['speedX'] ** 2 + row['speedY'] ** 2
            if speed_sq < 1e-6:  # threshold may need tuning per scenario
                return 1e5  # near-zero speed: return a very large curvature
            divide = speed_sq ** (3 / 2)
            res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
            return res
        except Exception:
            return 1e5  # also return a very large value on errors (division by zero, missing values, ...)

    def _comf_param_cal(self):
        """Compute the comfort-related parameters."""
        # Speed-dependent acceleration / deceleration thresholds
        self.ego_df['ip_acc'] = self.ego_df['v'].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
        self.ego_df['ip_dec'] = self.ego_df['v'].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
        self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
            lambda x: 1 if x < 0 else 0)
        self.ego_df['slam_accel'] = (self.ego_df['lon_acc'] - self.ego_df['ip_acc']).apply(
            lambda x: 1 if x > 0 else 0)
        self.ego_df['cadence'] = self.ego_df.apply(
            lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
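
        # Note: assuming `get_interpolation` is a linear interpolation between
        # point1 and point2, the slam thresholds tighten as speed v grows;
        # e.g. at v = 45 (midway between 18 and 72) ip_acc = 3.0 and ip_dec = -4.25.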

        # Parameters used by the shake detector
        self.ego_df['cur_ego_path'] = self.ego_df.apply(self._cal_cur_ego_path, axis=1)
        self.ego_df['curvHor'] = self.ego_df['curvHor'].astype('float')
        self.ego_df['cur_diff'] = (self.ego_df['cur_ego_path'] - self.ego_df['curvHor']).abs()
        self.ego_df['R'] = self.ego_df['curvHor'].apply(lambda x: 10000 if x == 0 else 1 / x)
        self.ego_df['R_ego'] = self.ego_df['cur_ego_path'].apply(lambda x: 10000 if x == 0 else 1 / x)
        self.ego_df['R_diff'] = (self.ego_df['R_ego'] - self.ego_df['R']).abs()

        self.cur_ego_path_list = self.ego_df['cur_ego_path'].values.tolist()
        self.curvature_list = self.ego_df['curvHor'].values.tolist()

    def _peak_valley_determination(self, df):
        """Locate the peaks and valleys of the vehicle yaw rate (speedH)."""
        # Parameters tuned to suppress noise
        peaks, _ = scipy.signal.find_peaks(df['speedH'], height=0.03, distance=3, prominence=0.03, width=1)
        valleys, _ = scipy.signal.find_peaks(-df['speedH'], height=0.03, distance=3, prominence=0.03, width=1)
        peak_valley = sorted(list(peaks) + list(valleys))
        return peak_valley

    def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=0.06):
        """
        Check whether a consecutive peak/valley pair qualifies as weaving:
        the time gap is below tw, the amplitude difference exceeds avg,
        and the two yaw-rate values have opposite signs.
        """
        t_diff = p_curr[0] - p_last[0]
        v_diff = abs(p_curr[1] - p_last[1])
        s = p_curr[1] * p_last[1]
        zigzag_flag = t_diff < tw and v_diff > avg and s < 0
        if zigzag_flag and ([p_last[0], p_curr[0]] not in self.zigzag_time_list):
            self.zigzag_time_list.append([p_last[0], p_curr[0]])
        return zigzag_flag

    @peak_valley_decorator
    def zigzag_count_func(self, p_curr, p_last, flag=True):
        """Count weaving (zigzag) events."""
        if flag:
            self.zigzag_count += 1

    @peak_valley_decorator
    def cal_zigzag_strength_strength(self, p_curr, p_last, flag=True):
        """Compute the weaving (zigzag) strength."""
        if flag:
            v_diff = abs(p_curr[1] - p_last[1])
            t_diff = p_curr[0] - p_last[0]
            if t_diff > 0:
                self.zigzag_stre_list.append(v_diff / t_diff)  # average angular acceleration
        else:
            self.zigzag_stre_list = []

    def _shake_detector(self, Cr_diff=0.05, T_diff=0.39):
        """Detect shake events."""
        time_list = []
        frame_list = []

        df = self.ego_df.copy()
        # Keep the segments where the ego-path curvature deviates from the road curvature by more than the threshold
        df = df[df['cur_diff'] > Cr_diff]
        df['frame_ID_diff'] = df['simFrame'].diff()
        filtered_df = df[df.frame_ID_diff > T_diff]  # large frame gaps separate distinct shake episodes
        row_numbers = filtered_df.index.tolist()
        cut_column = pd.cut(df.index, bins=row_numbers)
        grouped = df.groupby(cut_column)
        dfs = {}
        for name, group in grouped:
            dfs[name] = group.reset_index(drop=True)
        for name, df_group in dfs.items():
            # Straight road, no active lane change
            df_group['curvHor'] = df_group['curvHor'].abs()
            df_group_straight = df_group[(df_group.lightMask == 0) & (df_group.curvHor < 0.001)]
            if not df_group_straight.empty:
                time_list.extend(df_group_straight['simTime'].values)
                frame_list.extend(df_group_straight['simFrame'].values)
                self.shake_count = self.shake_count + 1
            # Turn signal on while the road is straight: the shake criterion uses a larger curvature-difference threshold
            df_group_change_lane = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'] < 0.001)]
            df_group_change_lane_data = df_group_change_lane[df_group_change_lane.cur_diff > Cr_diff + 0.2]
            if not df_group_change_lane_data.empty:
                time_list.extend(df_group_change_lane_data['simTime'].values)
                frame_list.extend(df_group_change_lane_data['simFrame'].values)
                self.shake_count = self.shake_count + 1
            # Turning with the turn signal on
            df_group_turn = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'].abs() > 0.001)]
            df_group_turn_data = df_group_turn[df_group_turn.cur_diff.abs() > Cr_diff + 0.1]
            if not df_group_turn_data.empty:
                time_list.extend(df_group_turn_data['simTime'].values)
                frame_list.extend(df_group_turn_data['simFrame'].values)
                self.shake_count = self.shake_count + 1

        # Merge detections that are less than TIME_RANGE seconds apart into one event
        TIME_RANGE = 1
        t_list = time_list
        f_list = frame_list
        group_time = []
        group_frame = []
        sub_group_time = []
        sub_group_frame = []
        for i in range(len(f_list)):
            if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE:
                sub_group_time.append(t_list[i])
                sub_group_frame.append(f_list[i])
            else:
                group_time.append(sub_group_time)
                group_frame.append(sub_group_frame)
                sub_group_time = [t_list[i]]
                sub_group_frame = [f_list[i]]
        if sub_group_time:  # close the last group
            group_time.append(sub_group_time)
            group_frame.append(sub_group_frame)

        # Values for the report output
        shake_time = [[g[0], g[-1]] for g in group_time]
        shake_frame = [[g[0], g[-1]] for g in group_frame]
        self.shake_count = len(shake_time)

        if shake_time:
            # Save a summary of the shake events
            time_df = pd.DataFrame(shake_time, columns=['start_time', 'end_time'])
            frame_df = pd.DataFrame(shake_frame, columns=['start_frame', 'end_frame'])
            discomfort_df = pd.concat([time_df, frame_df], axis=1)
            discomfort_df['type'] = 'shake'
            self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)

        return time_list

    def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
        """Classify one longitudinal-acceleration sample for cadence detection."""
        if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
            # Too small to matter, or already covered by the slam thresholds
            return np.nan
        elif 0 < lon_acc < ip_acc:
            return 1
        elif ip_dec < lon_acc < 0:
            return -1
        else:
            return 0

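    # Worked example for `_cadence_process_new` (assuming ip_acc = 3.0 and
    # ip_dec = -4.0): lon_acc = 0.5 -> NaN (|lon_acc| < 1), 2.0 -> 1,
    # -2.0 -> -1, 5.0 -> NaN (above ip_acc, handled as slam-accelerate instead).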
    def _cadence_detector(self):
        """Detect cadence (jerky acceleration) events."""
        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence', 'v']].copy()
        time_list = data['simTime'].values.tolist()

        # `data['cadence'] != np.nan` is always True, so drop NaN rows explicitly
        data = data[data['cadence'].notna()]
        data['cadence_diff'] = data['cadence'].diff()
        data.dropna(subset=['cadence_diff'], inplace=True)
        data = data[data['cadence_diff'] != 0]

        t_list = data['simTime'].values.tolist()
        f_list = data['simFrame'].values.tolist()

        TIME_RANGE = 1
        group_time = []
        group_frame = []
        sub_group_time = []
        sub_group_frame = []
        for i in range(len(f_list)):
            if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE:  # feature points within one second form one cadence group
                sub_group_time.append(t_list[i])
                sub_group_frame.append(f_list[i])
            else:
                group_time.append(sub_group_time)
                group_frame.append(sub_group_frame)
                sub_group_time = [t_list[i]]
                sub_group_frame = [f_list[i]]
        group_time.append(sub_group_time)
        group_frame.append(sub_group_frame)
        group_time = [g for g in group_time if len(g) >= 1]  # a single feature point already counts as one cadence event
        group_frame = [g for g in group_frame if len(g) >= 1]

        # Values for the report output
        cadence_time = [[g[0], g[-1]] for g in group_time]
        cadence_frame = [[g[0], g[-1]] for g in group_frame]

        if cadence_time:
            # Save a summary of the cadence events
            time_df = pd.DataFrame(cadence_time, columns=['start_time', 'end_time'])
            frame_df = pd.DataFrame(cadence_frame, columns=['start_frame', 'end_frame'])
            discomfort_df = pd.concat([time_df, frame_df], axis=1)
            discomfort_df['type'] = 'cadence'
            self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)

        # Expand each [start, end] pair back into the full list of timestamps it covers
        cadence_time_list = [time for pair in cadence_time
                             for time in self.ego_df['simTime'].values if pair[0] <= time <= pair[1]]
        self.cadence_count = len(cadence_time)
        return cadence_time_list

    def _slam_brake_detector(self):
        """Detect slam-brake (hard braking) events."""
        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake', 'v']].copy()
        res_df = data[data['slam_brake'] == 1]
        t_list = res_df['simTime'].values
        f_list = res_df['simFrame'].values.tolist()

        TIME_RANGE = 1
        group_time = []
        group_frame = []
        sub_group_time = []
        sub_group_frame = []
        for i in range(len(f_list)):
            if not sub_group_time or f_list[i] - f_list[i - 1] <= TIME_RANGE:  # consecutive frames belong to the same braking event
                sub_group_time.append(t_list[i])
                sub_group_frame.append(f_list[i])
            else:
                group_time.append(sub_group_time)
                group_frame.append(sub_group_frame)
                sub_group_time = [t_list[i]]
                sub_group_frame = [f_list[i]]
        group_time.append(sub_group_time)
        group_frame.append(sub_group_frame)
        group_time = [g for g in group_time if len(g) >= 2]  # at least two consecutive frames count as one hard-braking event
        group_frame = [g for g in group_frame if len(g) >= 2]

        # Values for the report output
        slam_brake_time = [[g[0], g[-1]] for g in group_time]
        slam_brake_frame = [[g[0], g[-1]] for g in group_frame]

        if slam_brake_time:
            # Save a summary of the hard-braking events
            time_df = pd.DataFrame(slam_brake_time, columns=['start_time', 'end_time'])
            frame_df = pd.DataFrame(slam_brake_frame, columns=['start_frame', 'end_frame'])
            discomfort_df = pd.concat([time_df, frame_df], axis=1)
            discomfort_df['type'] = 'slam_brake'
            self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)

        time_list = [element for sublist in group_time for element in sublist]
        self.slam_brake_count = len(group_time)
        return time_list

    def _slam_accel_detector(self):
        """Detect slam-accelerate (hard acceleration) events."""
        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel', 'v']].copy()
        res_df = data.loc[data['slam_accel'] == 1]
        t_list = res_df['simTime'].values
        f_list = res_df['simFrame'].values.tolist()

        group_time = []
        group_frame = []
        sub_group_time = []
        sub_group_frame = []
        for i in range(len(f_list)):
            if not sub_group_time or f_list[i] - f_list[i - 1] <= 1:  # consecutive frames belong to the same acceleration event
                sub_group_time.append(t_list[i])
                sub_group_frame.append(f_list[i])
            else:
                group_time.append(sub_group_time)
                group_frame.append(sub_group_frame)
                sub_group_time = [t_list[i]]
                sub_group_frame = [f_list[i]]
        group_time.append(sub_group_time)
        group_frame.append(sub_group_frame)
        group_time = [g for g in group_time if len(g) >= 2]  # at least two consecutive frames count as one event
        group_frame = [g for g in group_frame if len(g) >= 2]

        # Values for the report output
        slam_accel_time = [[g[0], g[-1]] for g in group_time]
        slam_accel_frame = [[g[0], g[-1]] for g in group_frame]

        if slam_accel_time:
            # Save a summary of the hard-acceleration events
            time_df = pd.DataFrame(slam_accel_time, columns=['start_time', 'end_time'])
            frame_df = pd.DataFrame(slam_accel_frame, columns=['start_frame', 'end_frame'])
            discomfort_df = pd.concat([time_df, frame_df], axis=1)
            discomfort_df['type'] = 'slam_accel'
            self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)

        time_list = [element for sublist in group_time for element in sublist]
        self.slam_accel_count = len(group_time)
        return time_list

    def comf_statistic(self):
        """Aggregate the comfort statistics."""
        df = self.ego_df[['simTime', 'simFrame', 'cur_diff', 'lon_acc', 'lon_acc_roc', 'accelH', 'speedH', 'lat_acc', 'v']].copy()

        self.zigzag_count_func()
        self.cal_zigzag_strength_strength()
        if self.zigzag_time_list:
            # Save a summary of the weaving (zigzag) events
            zigzag_df = pd.DataFrame(self.zigzag_time_list, columns=['start_time', 'end_time'])
            zigzag_df = get_frame_with_time(zigzag_df, self.ego_df)
            zigzag_df['type'] = 'zigzag'
            self.discomfort_df = pd.concat([self.discomfort_df, zigzag_df], ignore_index=True)

        zigzag_t_list = []
        # zigzag_time_list only holds [t_start, t_end] pairs; expand them into the full list of covered timestamps
        t_list = df['simTime'].values.tolist()
        for t_start, t_end in self.zigzag_time_list:
            index_1 = t_list.index(t_start)
            index_2 = t_list.index(t_end)
            zigzag_t_list.extend(t_list[index_1:index_2 + 1])
        zigzag_t_list = list(set(zigzag_t_list))

        shake_t_list = self._shake_detector()
        cadence_t_list = self._cadence_detector()
        slam_brake_t_list = self._slam_brake_detector()
        slam_accel_t_list = self._slam_accel_detector()

        # Aggregated counts
        self.calculated_value = {
            "weaving": self.zigzag_count,
            "shake": self.shake_count,
            "cadence": self.cadence_count,
            "slamBrake": self.slam_brake_count,
            "slamAccelerate": self.slam_accel_count
        }
        self.logger.info(f"Comfort calculation finished; statistics: {self.calculated_value}")
        return self.calculated_value

    def report_statistic(self):
        """Generate the comfort evaluation report."""
        comfort_result = self.comf_statistic()
        evaluator = Score(self.data_processed.comfort_config)
        result = evaluator.evaluate(comfort_result)
        print("\n[Comfort performance and scores]")
        return result
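
# --------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the evaluation
# pipeline). It assumes a processed-data object exposing `ego_data` (a
# DataFrame containing the columns listed in config.COMFORT_INFO) and
# `comfort_config`; how that object is built is project-specific.
#
#     processed = <project-specific data processing>    # yields ego_data, comfort_config
#     comfort = Comfort(processed)
#     counts = comfort.comf_statistic()    # {'weaving': ..., 'shake': ..., ...}
#     result = comfort.report_statistic()  # scored result via Score.evaluate
# --------------------------------------------------------------------------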