# comfort.py
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. ##################################################################
  4. #
  5. # Copyright (c) 2023 CICV, Inc. All Rights Reserved
  6. #
  7. ##################################################################
  8. """
  9. @Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
  10. @Data: 2023/06/25
  11. @Last Modified: 2023/06/25
  12. @Summary: Comfort metrics
  13. """
  14. import sys
  15. import math
  16. import pandas as pd
  17. import numpy as np
  18. import scipy.signal
  19. from pathlib import Path
  20. from modules.lib.score import Score
  21. from modules.lib.common import get_interpolation, get_frame_with_time
  22. from modules.config import config
  23. from modules.lib import data_process
  24. from modules.lib.log_manager import LogManager
def peak_valley_decorator(method):
    """Decorator for zigzag (weaving) statistics.

    Walks consecutive peak/valley pairs of the yaw-rate signal ('speedH')
    and calls the wrapped method once per qualifying pair; when no
    peaks/valleys exist at all, calls it once with dummy points and
    flag=False so the method can handle the empty case.
    """
    def wrapper(self, *args, **kwargs):
        # Positions of all peaks and valleys in the yaw-rate trace.
        peak_valley = self._peak_valley_determination(self.df)
        # Each element is [simTime, speedH] at one extremum.
        pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
        if len(pv_list) != 0:
            flag = True  # at least one extremum exists
            p_last = pv_list[0]
            for i in range(1, len(pv_list)):
                p_curr = pv_list[i]
                if self._peak_valley_judgment(p_last, p_curr):
                    # method(self, p_curr, p_last)
                    method(self, p_curr, p_last, flag, *args, **kwargs)
                else:
                    # Not a zigzag pair: slide the reference point forward.
                    p_last = p_curr
            # NOTE(review): returns the undecorated function object rather
            # than a result; current callers ignore the return value.
            return method
        else:
            flag = False
            p_curr = [0, 0]
            p_last = [0, 0]
            method(self, p_curr, p_last, flag, *args, **kwargs)
            return method
    return wrapper
class Comfort(object):
    """
    Class for achieving comfort metrics for autonomous driving.
    Attributes:
        dataframe: Vehicle driving data, stored in dataframe format.
    """

    def __init__(self, data_processed):
        """Initialize counters/accumulators and precompute comfort features.

        Parameters:
            data_processed: preprocessed data bundle; must expose `ego_data`
                (a DataFrame with at least 'simTime' and 'simFrame' columns)
                and `comfort_config` (used later by report_statistic).
        """
        # self.logger = log.get_logger()
        self.eval_data = pd.DataFrame()
        self.data_processed = data_processed
        self.logger = LogManager().get_logger()  # global logger instance
        self.data = data_processed.ego_data
        # self.mileage = data_processed.report_info['mileage']
        self.ego_df = pd.DataFrame()
        # One row per detected discomfort event, appended by the detectors.
        self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
        # Final per-metric counts; overwritten by comf_statistic().
        # NOTE(review): comf_statistic() rebuilds this dict with the key
        # "weaving" (lower-case) instead of 'Weaving' -- confirm which
        # spelling downstream consumers expect.
        self.calculated_value = {
            'Weaving': 0,
            'shake': 0,
            'cadence': 0,
            'slamBrake': 0,
            'slamAccelerate': 0,
        }
        # self.time_list = data_processed.driver_ctrl_data['time_list']
        # self.frame_list = data_processed.driver_ctrl_data['frame_list']
        self.time_list = self.data['simTime'].values.tolist()
        self.frame_list = self.data['simFrame'].values.tolist()
        self.count_dict = {}
        self.duration_dict = {}
        self.strength_dict = {}
        # Event counters per discomfort type.
        self.discomfort_count = 0
        self.zigzag_count = 0
        self.shake_count = 0
        self.cadence_count = 0
        self.slam_brake_count = 0
        self.slam_accel_count = 0
        # Event strength accumulators per discomfort type.
        self.zigzag_strength = 0
        self.shake_strength = 0
        self.cadence_strength = 0
        self.slam_brake_strength = 0
        self.slam_accel_strength = 0
        # Event duration accumulators (seconds) per discomfort type.
        self.discomfort_duration = 0
        self.zigzag_duration = 0
        self.shake_duration = 0
        self.cadence_duration = 0
        self.slam_brake_duration = 0
        self.slam_accel_duration = 0
        # [start_time, end_time] pairs of zigzag swings, filled by
        # _peak_valley_judgment via peak_valley_decorator.
        self.zigzag_time_list = []
        self.zigzag_frame_list = []
        self.zigzag_stre_list = []
        self.cur_ego_path_list = []
        self.curvature_list = []
        self._get_data()
        self._comf_param_cal()
def _get_data(self):
    """Select the comfort-related columns (config.COMFORT_INFO) from ego data."""
    self.ego_df = self.data[config.COMFORT_INFO].copy()
    self.df = self.ego_df.reset_index(drop=True)  # index follows the original csv index
  105. # def _cal_cur_ego_path(self, row):
  106. # try:
  107. # divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
  108. # if not divide:
  109. # res = None
  110. # else:
  111. # res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
  112. # except:
  113. # res = None
  114. # return res
  115. import numpy as np
  116. def _cal_cur_ego_path(self, row):
  117. try:
  118. # 计算速度平方和,判断是否接近零
  119. speed_sq = row['speedX']**2 + row['speedY']**2
  120. if speed_sq < 1e-6: # 阈值根据实际场景调整
  121. return 1e5 # 速度接近零时返回极大曲率
  122. divide = speed_sq ** (3/2)
  123. res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
  124. return res
  125. except Exception as e:
  126. return 1e5 # 异常时也返回极大值(如除零、缺失值等)
def _comf_param_cal(self):
    """Derive the per-frame comfort feature columns on self.ego_df.

    Adds:
        ip_acc / ip_dec: speed-dependent accel/decel limits, linearly
            interpolated between (18, 4)-(72, 2) and (18, -5)-(72, -3.5)
            respectively (presumably km/h vs m/s^2 -- TODO confirm units).
        slam_brake / slam_accel: 1 when lon_acc breaks the respective limit.
        cadence: label from _cadence_process_new.
        cur_ego_path / cur_diff / R / R_ego / R_diff: ego-trajectory
            curvature vs. lane curvature features used by the shake detector.
    """
    # [log]
    self.ego_df['ip_acc'] = self.ego_df['v'].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
    self.ego_df['ip_dec'] = self.ego_df['v'].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
    # Hard braking: longitudinal acceleration below the decel limit.
    self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
        lambda x: 1 if x < 0 else 0)
    # Hard acceleration: longitudinal acceleration above the accel limit.
    self.ego_df['slam_accel'] = (self.ego_df['lon_acc'] - self.ego_df['ip_acc']).apply(
        lambda x: 1 if x > 0 else 0)
    self.ego_df['cadence'] = self.ego_df.apply(
        lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
    # for shake detector
    self.ego_df['cur_ego_path'] = self.ego_df.apply(self._cal_cur_ego_path, axis=1)
    self.ego_df['curvHor'] = self.ego_df['curvHor'].astype('float')
    self.ego_df['cur_diff'] = (self.ego_df['cur_ego_path'] - self.ego_df['curvHor']).abs()
    # Radii: 10000 stands in for "straight" when curvature is exactly 0.
    self.ego_df['R'] = self.ego_df['curvHor'].apply(lambda x: 10000 if x == 0 else 1 / x)
    self.ego_df['R_ego'] = self.ego_df['cur_ego_path'].apply(lambda x: 10000 if x == 0 else 1 / x)
    self.ego_df['R_diff'] = (self.ego_df['R_ego'] - self.ego_df['R']).abs()
    self.cur_ego_path_list = self.ego_df['cur_ego_path'].values.tolist()
    self.curvature_list = self.ego_df['curvHor'].values.tolist()
  148. def _peak_valley_determination(self, df):
  149. """
  150. Determine the peak and valley of the vehicle based on its current angular velocity.
  151. Parameters:
  152. df: Dataframe containing the vehicle angular velocity.
  153. Returns:
  154. peak_valley: List of indices representing peaks and valleys.
  155. """
  156. peaks, _ = scipy.signal.find_peaks(df['speedH'], height=0.01, distance=1, prominence=0.01)
  157. valleys, _ = scipy.signal.find_peaks(-df['speedH'], height=0.01, distance=1, prominence=0.01)
  158. peak_valley = sorted(list(peaks) + list(valleys))
  159. return peak_valley
  160. def _peak_valley_judgment(self, p_last, p_curr, tw=10000, avg=0.02):
  161. """
  162. Determine if the given peaks and valleys satisfy certain conditions.
  163. Parameters:
  164. p_last: Previous peak or valley data point.
  165. p_curr: Current peak or valley data point.
  166. tw: Threshold time difference between peaks and valleys.
  167. avg: Angular velocity gap threshold.
  168. Returns:
  169. Boolean indicating whether the conditions are satisfied.
  170. """
  171. t_diff = p_curr[0] - p_last[0]
  172. v_diff = abs(p_curr[1] - p_last[1])
  173. s = p_curr[1] * p_last[1]
  174. zigzag_flag = t_diff < tw and v_diff > avg and s < 0
  175. if zigzag_flag and ([p_last[0], p_curr[0]] not in self.zigzag_time_list):
  176. self.zigzag_time_list.append([p_last[0], p_curr[0]])
  177. return zigzag_flag
  178. @peak_valley_decorator
  179. def zigzag_count_func(self, p_curr, p_last, flag=True):
  180. """
  181. Count the number of zigzag movements.
  182. Parameters:
  183. df: Input dataframe data.
  184. Returns:
  185. zigzag_count: Number of zigzag movements.
  186. """
  187. if flag:
  188. self.zigzag_count += 1
  189. else:
  190. self.zigzag_count += 0
  191. @peak_valley_decorator
  192. def cal_zigzag_strength_strength(self, p_curr, p_last, flag=True):
  193. """
  194. Calculate various strength statistics.
  195. Returns:
  196. Tuple containing maximum strength, minimum strength,
  197. average strength, and 99th percentile strength.
  198. """
  199. if flag:
  200. v_diff = abs(p_curr[1] - p_last[1])
  201. t_diff = p_curr[0] - p_last[0]
  202. if t_diff > 0:
  203. self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
  204. else:
  205. self.zigzag_stre_list = []
  206. def _shake_detector(self, Cr_diff=0.05, T_diff=0.39):
  207. """
  208. ego车横向加速度ax;
  209. ego车轨迹横向曲率;
  210. ego车轨迹曲率变化率;
  211. ego车所在车lane曲率;
  212. ego车所在车lane曲率变化率;
  213. 转向灯(暂时存疑,可不用)Cr_diff = 0.1, T_diff = 0.04
  214. 求解曲率公式k(t) = (x'(t) * y''(t) - y'(t) * x''(t)) / ((x'(t))^2 + (y'(t))^2)^(3/2)
  215. """
  216. time_list = []
  217. frame_list = []
  218. shake_time_list = []
  219. df = self.ego_df.copy()
  220. df = df[df['cur_diff'] > Cr_diff]
  221. df['frame_ID_diff'] = df['simFrame'].diff() # 找出行车轨迹曲率与道路曲率之差大于阈值的数据段
  222. filtered_df = df[df.frame_ID_diff > T_diff] # 此处是用大间隔区分多次晃动情景 。
  223. row_numbers = filtered_df.index.tolist()
  224. cut_column = pd.cut(df.index, bins=row_numbers)
  225. grouped = df.groupby(cut_column)
  226. dfs = {}
  227. for name, group in grouped:
  228. dfs[name] = group.reset_index(drop=True)
  229. for name, df_group in dfs.items():
  230. # 直道,未主动换道
  231. df_group['curvHor'] = df_group['curvHor'].abs()
  232. df_group_straight = df_group[(df_group.lightMask == 0) & (df_group.curvHor < 0.001)]
  233. if not df_group_straight.empty:
  234. tmp_list = df_group_straight['simTime'].values
  235. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  236. time_list.extend(df_group_straight['simTime'].values)
  237. frame_list.extend(df_group_straight['simFrame'].values)
  238. self.shake_count = self.shake_count + 1
  239. # 打转向灯,道路为直道,此时晃动判断标准车辆曲率变化率为一个更大的阈值
  240. df_group_change_lane = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'] < 0.001)]
  241. df_group_change_lane_data = df_group_change_lane[df_group_change_lane.cur_diff > Cr_diff + 0.2]
  242. if not df_group_change_lane_data.empty:
  243. tmp_list = df_group_change_lane_data['simTime'].values
  244. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  245. time_list.extend(df_group_change_lane_data['simTime'].values)
  246. frame_list.extend(df_group_change_lane_data['simFrame'].values)
  247. self.shake_count = self.shake_count + 1
  248. # 转弯,打转向灯
  249. df_group_turn = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'].abs() > 0.001)]
  250. df_group_turn_data = df_group_turn[df_group_turn.cur_diff.abs() > Cr_diff + 0.1]
  251. if not df_group_turn_data.empty:
  252. tmp_list = df_group_turn_data['simTime'].values
  253. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  254. time_list.extend(df_group_turn_data['simTime'].values)
  255. frame_list.extend(df_group_turn_data['simFrame'].values)
  256. self.shake_count = self.shake_count + 1
  257. TIME_RANGE = 1
  258. t_list = time_list
  259. f_list = frame_list
  260. group_time = []
  261. group_frame = []
  262. sub_group_time = []
  263. sub_group_frame = []
  264. for i in range(len(f_list)):
  265. if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE:
  266. sub_group_time.append(t_list[i])
  267. sub_group_frame.append(f_list[i])
  268. else:
  269. group_time.append(sub_group_time)
  270. group_frame.append(sub_group_frame)
  271. sub_group_time = [t_list[i]]
  272. sub_group_frame = [f_list[i]]
  273. # 输出图表值
  274. shake_time = [[g[0], g[-1]] for g in group_time]
  275. shake_frame = [[g[0], g[-1]] for g in group_frame]
  276. self.shake_count = len(shake_time)
  277. if shake_time:
  278. time_df = pd.DataFrame(shake_time, columns=['start_time', 'end_time'])
  279. frame_df = pd.DataFrame(shake_frame, columns=['start_frame', 'end_frame'])
  280. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  281. discomfort_df['type'] = 'shake'
  282. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  283. return time_list
def _cadence_process(self, lon_acc_roc, ip_dec_roc):
    # Legacy cadence classifier based on the rate of change of longitudinal
    # acceleration; superseded by _cadence_process_new (which is what
    # _comf_param_cal actually calls) but kept for reference.
    # Returns np.nan outside the band of interest, 1 for an acceleration
    # spike, -1 for a deceleration spike.
    if abs(lon_acc_roc) >= abs(ip_dec_roc) or abs(lon_acc_roc) < 1:
        return np.nan
    # elif abs(lon_acc_roc) == 0:
    # NOTE(review): unreachable -- abs(lon_acc_roc) == 0 implies < 1, which
    # already returned np.nan above.
    elif abs(lon_acc_roc) == 0:
        return 0
    elif lon_acc_roc > 0 and lon_acc_roc < -ip_dec_roc:
        return 1
    elif lon_acc_roc < 0 and lon_acc_roc > ip_dec_roc:
        return -1
    # NOTE(review): implicitly returns None when no branch matches.
  294. def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
  295. if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
  296. return np.nan
  297. # elif abs(lon_acc_roc) == 0:
  298. elif abs(lon_acc) == 0:
  299. return 0
  300. elif lon_acc > 0 and lon_acc < ip_acc:
  301. return 1
  302. elif lon_acc < 0 and lon_acc > ip_dec:
  303. return -1
  304. else:
  305. return 0
  306. def _cadence_detector(self):
  307. """
  308. # 加速度突变:先加后减,先减后加,先加然后停,先减然后停
  309. # 顿挫:2s内多次加速度变化率突变
  310. # 求出每一个特征点,然后提取,然后将每一个特征点后面的2s做一个窗口,统计频率,避免无效运算
  311. # 将特征点筛选出来
  312. # 将特征点时间作为聚类标准,大于1s的pass,小于等于1s的聚类到一个分组
  313. # 去掉小于3个特征点的分组
  314. """
  315. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'cadence']].copy()
  316. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence']].copy()
  317. time_list = data['simTime'].values.tolist()
  318. data = data[data['cadence'] != np.nan]
  319. data['cadence_diff'] = data['cadence'].diff()
  320. data.dropna(subset='cadence_diff', inplace=True)
  321. data = data[data['cadence_diff'] != 0]
  322. t_list = data['simTime'].values.tolist()
  323. f_list = data['simFrame'].values.tolist()
  324. TIME_RANGE = 1
  325. group_time = []
  326. group_frame = []
  327. sub_group_time = []
  328. sub_group_frame = []
  329. for i in range(len(f_list)):
  330. if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE: # 特征点相邻一秒内的,算作同一组顿挫
  331. sub_group_time.append(t_list[i])
  332. sub_group_frame.append(f_list[i])
  333. else:
  334. group_time.append(sub_group_time)
  335. group_frame.append(sub_group_frame)
  336. sub_group_time = [t_list[i]]
  337. sub_group_frame = [f_list[i]]
  338. group_time.append(sub_group_time)
  339. group_frame.append(sub_group_frame)
  340. group_time = [g for g in group_time if len(g) >= 1] # 有一次特征点则算作一次顿挫
  341. group_frame = [g for g in group_frame if len(g) >= 1]
  342. # 将顿挫组的起始时间为组重新统计时间
  343. # 输出图表值
  344. cadence_time = [[g[0], g[-1]] for g in group_time]
  345. cadence_frame = [[g[0], g[-1]] for g in group_frame]
  346. if cadence_time:
  347. time_df = pd.DataFrame(cadence_time, columns=['start_time', 'end_time'])
  348. frame_df = pd.DataFrame(cadence_frame, columns=['start_frame', 'end_frame'])
  349. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  350. discomfort_df['type'] = 'cadence'
  351. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  352. # 将顿挫组的起始时间为组重新统计时间
  353. cadence_time_list = [time for pair in cadence_time for time in time_list if pair[0] <= time <= pair[1]]
  354. # time_list = [element for sublist in group_time for element in sublist]
  355. # merged_list = [element for sublist in res_group for element in sublist]
  356. # res_df = data[data['simTime'].isin(merged_list)]
  357. stre_list = []
  358. freq_list = []
  359. for g in group_time:
  360. # calculate strength
  361. g_df = data[data['simTime'].isin(g)]
  362. strength = g_df['lon_acc'].abs().mean()
  363. stre_list.append(strength)
  364. # calculate frequency
  365. cnt = len(g)
  366. t_start = g_df['simTime'].iloc[0]
  367. t_end = g_df['simTime'].iloc[-1]
  368. t_delta = t_end - t_start
  369. frequency = cnt / t_delta
  370. freq_list.append(frequency)
  371. self.cadence_count = len(freq_list)
  372. cadence_stre = sum(stre_list) / len(stre_list) if stre_list else 0
  373. return cadence_time_list
  374. def _slam_brake_detector(self):
  375. # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
  376. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_dec_roc', 'slam_brake']].copy()
  377. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake']].copy()
  378. # data['slam_diff'] = data['slam_brake'].diff()
  379. # res_df = data[data['slam_diff'] == 1]
  380. res_df = data[data['slam_brake'] == 1]
  381. t_list = res_df['simTime'].values
  382. f_list = res_df['simFrame'].values.tolist()
  383. TIME_RANGE = 1
  384. group_time = []
  385. group_frame = []
  386. sub_group_time = []
  387. sub_group_frame = []
  388. for i in range(len(f_list)):
  389. if not sub_group_time or f_list[i] - f_list[i - 1] <= TIME_RANGE: # 连续帧的算作同一组急刹
  390. sub_group_time.append(t_list[i])
  391. sub_group_frame.append(f_list[i])
  392. else:
  393. group_time.append(sub_group_time)
  394. group_frame.append(sub_group_frame)
  395. sub_group_time = [t_list[i]]
  396. sub_group_frame = [f_list[i]]
  397. group_time.append(sub_group_time)
  398. group_frame.append(sub_group_frame)
  399. group_time = [g for g in group_time if len(g) >= 2] # 达到两帧算作一次急刹
  400. group_frame = [g for g in group_frame if len(g) >= 2]
  401. # 输出图表值
  402. slam_brake_time = [[g[0], g[-1]] for g in group_time]
  403. slam_brake_frame = [[g[0], g[-1]] for g in group_frame]
  404. if slam_brake_time:
  405. time_df = pd.DataFrame(slam_brake_time, columns=['start_time', 'end_time'])
  406. frame_df = pd.DataFrame(slam_brake_frame, columns=['start_frame', 'end_frame'])
  407. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  408. discomfort_df['type'] = 'slam_brake'
  409. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  410. time_list = [element for sublist in group_time for element in sublist]
  411. self.slam_brake_count = len(group_time) # / self.mileage # * 1000000
  412. return time_list
  413. def _slam_accel_detector(self):
  414. # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
  415. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_acc_roc', 'slam_accel']].copy()
  416. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel']].copy()
  417. # data['slam_diff'] = data['slam_accel'].diff()
  418. # res_df = data.loc[data['slam_diff'] == 1]
  419. res_df = data.loc[data['slam_accel'] == 1]
  420. t_list = res_df['simTime'].values
  421. f_list = res_df['simFrame'].values.tolist()
  422. group_time = []
  423. group_frame = []
  424. sub_group_time = []
  425. sub_group_frame = []
  426. for i in range(len(f_list)):
  427. if not group_time or f_list[i] - f_list[i - 1] <= 1: # 连续帧的算作同一组急加速
  428. sub_group_time.append(t_list[i])
  429. sub_group_frame.append(f_list[i])
  430. else:
  431. group_time.append(sub_group_time)
  432. group_frame.append(sub_group_frame)
  433. sub_group_time = [t_list[i]]
  434. sub_group_frame = [f_list[i]]
  435. group_time.append(sub_group_time)
  436. group_frame.append(sub_group_frame)
  437. group_time = [g for g in group_time if len(g) >= 2]
  438. group_frame = [g for g in group_frame if len(g) >= 2]
  439. # 输出图表值
  440. slam_accel_time = [[g[0], g[-1]] for g in group_time]
  441. slam_accel_frame = [[g[0], g[-1]] for g in group_frame]
  442. if slam_accel_time:
  443. time_df = pd.DataFrame(slam_accel_time, columns=['start_time', 'end_time'])
  444. frame_df = pd.DataFrame(slam_accel_frame, columns=['start_frame', 'end_frame'])
  445. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  446. discomfort_df['type'] = 'slam_accel'
  447. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  448. time_list = [element for sublist in group_time for element in sublist]
  449. self.slam_accel_count = len(group_time) # / self.mileage # * 1000000
  450. return time_list
def comf_statistic(self):
    """Run all discomfort detectors and collect the per-metric counts.

    Returns:
        dict with keys "weaving", "shake", "cadence", "slamBrake",
        "slamAccelerate" mapping to event counts. Also stored in
        self.calculated_value and logged.
    """
    df = self.ego_df[['simTime', 'cur_diff', 'lon_acc', 'lon_acc_roc', 'accelH']].copy()
    # Both are decorated with peak_valley_decorator; they fill
    # self.zigzag_count / self.zigzag_stre_list / self.zigzag_time_list.
    self.zigzag_count_func()
    self.cal_zigzag_strength_strength()
    if self.zigzag_time_list:
        zigzag_df = pd.DataFrame(self.zigzag_time_list, columns=['start_time', 'end_time'])
        zigzag_df = get_frame_with_time(zigzag_df, self.ego_df)
        zigzag_df['type'] = 'zigzag'
        self.discomfort_df = pd.concat([self.discomfort_df, zigzag_df], ignore_index=True)
        # discomfort_df = pd.concat([time_df, frame_df], axis=1)
        # self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
    zigzag_t_list = []
    # zigzag_time_list holds only [t_start, t_end] pairs; expand each pair
    # to the complete list of time stamps it covers.
    t_list = df['simTime'].values.tolist()
    for t_start, t_end in self.zigzag_time_list:
        index_1 = t_list.index(t_start)
        index_2 = t_list.index(t_end)
        zigzag_t_list.extend(t_list[index_1:index_2 + 1])
    zigzag_t_list = list(set(zigzag_t_list))
    shake_t_list = self._shake_detector()
    cadence_t_list = self._cadence_detector()
    slam_brake_t_list = self._slam_brake_detector()
    slam_accel_t_list = self._slam_accel_detector()
    discomfort_time_list = zigzag_t_list + shake_t_list + cadence_t_list + slam_brake_t_list + slam_accel_t_list
    discomfort_time_list = sorted(discomfort_time_list)  # sort
    # NOTE(review): set() discards the order just established; harmless here
    # because only len() is used below, but the sort is redundant.
    discomfort_time_list = list(set(discomfort_time_list))  # deduplicate
    # TIME_DIFF = self.time_list[3] - self.time_list[2]
    # TIME_DIFF = 0.4
    # Duration assumes a fixed 100 Hz sample rate -- TODO confirm.
    FREQUENCY = 100
    TIME_DIFF = 1 / FREQUENCY
    self.discomfort_duration = len(discomfort_time_list) * TIME_DIFF
    # Per-frame flags marking which discomfort type covers each time stamp.
    df['flag_zigzag'] = df['simTime'].apply(lambda x: 1 if x in zigzag_t_list else 0)
    df['flag_shake'] = df['simTime'].apply(lambda x: 1 if x in shake_t_list else 0)
    df['flag_cadence'] = df['simTime'].apply(lambda x: 1 if x in cadence_t_list else 0)
    df['flag_slam_brake'] = df['simTime'].apply(lambda x: 1 if x in slam_brake_t_list else 0)
    df['flag_slam_accel'] = df['simTime'].apply(lambda x: 1 if x in slam_accel_t_list else 0)
    # NOTE(review): key "weaving" is lower-case here but 'Weaving' in
    # __init__ -- confirm which spelling downstream consumers expect.
    self.calculated_value = {
        "weaving": self.zigzag_count,
        "shake": self.shake_count,
        "cadence": self.cadence_count,
        "slamBrake": self.slam_brake_count,
        "slamAccelerate": self.slam_accel_count
    }
    self.logger.info(f"舒适性计算完成,统计结果:{self.calculated_value}")
    return self.calculated_value
  496. def _nan_detect(self, num):
  497. if math.isnan(num):
  498. return 0
  499. return num
  500. def zip_time_pairs(self, zip_list):
  501. zip_time_pairs = zip(self.time_list, zip_list)
  502. zip_vs_time = [[x, "" if math.isnan(y) else y] for x, y in zip_time_pairs]
  503. return zip_vs_time
def report_statistic(self):
    """Compute the comfort metrics and evaluate them into a score.

    Returns:
        The result object produced by Score.evaluate on the metric counts
        from comf_statistic().
    """
    comfort_result = self.comf_statistic()
    # comfort_config_path = self.config_path / "comfort_config.yaml" #"comfort_config.yaml" # "comfort_config.yaml"
    evaluator = Score(self.data_processed.comfort_config)
    result = evaluator.evaluate(comfort_result)
    print("\n[舒适性表现及得分情况]")
    return result
if __name__ == '__main__':
    # Manual smoke test: evaluate comfort on one preprocessed case.
    case_name = 'ICA'
    mode_label = 'PGVIL'
    data = data_process.DataPreprocessing(case_name, mode_label)
    comfort_instance = Comfort(data)
    # Call the instance method report_statistic; it takes no arguments besides self.
    try:
        comfort_result = comfort_instance.report_statistic()
        result = {'comfort': comfort_result}
    except Exception as e:
        print(f"An error occurred in Comfort.report_statistic: {e}")