comfort.py 24 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. ##################################################################
  4. #
  5. # Copyright (c) 2023 CICV, Inc. All Rights Reserved
  6. #
  7. ##################################################################
  8. """
  9. @Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
  10. @Data: 2023/06/25
  11. @Last Modified: 2023/06/25
  12. @Summary: Comfort metrics
  13. """
  14. import sys
  15. import math
  16. import pandas as pd
  17. import numpy as np
  18. import scipy.signal
  19. from pathlib import Path
  20. root_path = Path(__file__).resolve().parent.parent
  21. sys.path.append(str(root_path))
  22. sys.path.append('/home/kevin/kevin/zhaoyuan/evaluate_zhaoyuan/')
  23. print(sys.path)
  24. from models.common.score import Score
  25. from common.common import get_interpolation, get_frame_with_time
  26. from config import config
  27. from models.common import data_process
  28. from models.common import log # 确保这个路径是正确的,或者调整它
  29. log_path = config.LOG_PATH
  30. logger = log.get_logger(log_path)
  31. def peak_valley_decorator(method):
  32. def wrapper(self, *args, **kwargs):
  33. peak_valley = self._peak_valley_determination(self.df)
  34. pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
  35. if len(pv_list) != 0:
  36. flag = True
  37. p_last = pv_list[0]
  38. for i in range(1, len(pv_list)):
  39. p_curr = pv_list[i]
  40. if self._peak_valley_judgment(p_last, p_curr):
  41. # method(self, p_curr, p_last)
  42. method(self, p_curr, p_last, flag, *args, **kwargs)
  43. else:
  44. p_last = p_curr
  45. return method
  46. else:
  47. flag = False
  48. p_curr = [0, 0]
  49. p_last = [0, 0]
  50. method(self, p_curr, p_last, flag, *args, **kwargs)
  51. return method
  52. return wrapper
class Comfort(object):
    """Comfort metrics for autonomous-driving evaluation.

    Runs five discomfort detectors over the ego-vehicle recording:
    weaving (zigzag), shake, cadence, slam brake and slam acceleration,
    and accumulates the detected events for scoring.

    Attributes:
        ego_df: per-frame ego-vehicle signals the detectors operate on.
        discomfort_df: one row per detected event
            (start/end time, start/end frame, type).
    """
    def __init__(self, data_processed):
        """Build comfort-metric state from a preprocessed recording.

        Parameters:
            data_processed: object exposing ``obj_data``, ``report_info``,
                ``driver_ctrl_df`` and ``driver_ctrl_data`` for the run.
        """
        self.eval_data = pd.DataFrame()
        self.data_processed = data_processed
        self.data = data_processed.obj_data[1]  # object id 1 — presumably the ego vehicle; TODO confirm
        self.mileage = data_processed.report_info['mileage']
        self.ego_df = pd.DataFrame()
        # one row per detected discomfort event, appended by the detectors
        self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
        self.df_drivectrl = data_processed.driver_ctrl_df
        # NOTE(review): key is capitalized 'Weaving' here but comf_statistic()
        # rebuilds the dict with lowercase 'weaving' — confirm which spelling
        # downstream consumers expect.
        self.calculated_value = {
            'Weaving': 0,
            'shake': 0,
            'cadence': 0,
            'slamBrake': 0,
            'slamAccelerate': 0,
        }
        self.metric_list = config.COMFORT_METRIC_LIST
        self.time_list = data_processed.driver_ctrl_data['time_list']
        self.frame_list = data_processed.driver_ctrl_data['frame_list']
        # aggregate statistics keyed by metric
        self.count_dict = {}
        self.duration_dict = {}
        self.strength_dict = {}
        # per-event counters
        self.discomfort_count = 0
        self.zigzag_count = 0
        self.shake_count = 0
        self.cadence_count = 0
        self.slam_brake_count = 0
        self.slam_accel_count = 0
        # per-event strengths
        self.zigzag_strength = 0
        self.shake_strength = 0
        self.cadence_strength = 0
        self.slam_brake_strength = 0
        self.slam_accel_strength = 0
        # per-event durations (seconds)
        self.discomfort_duration = 0
        self.zigzag_duration = 0
        self.shake_duration = 0
        self.cadence_duration = 0
        self.slam_brake_duration = 0
        self.slam_accel_duration = 0
        # zigzag bookkeeping filled by the peak/valley decorator
        self.zigzag_time_list = []
        self.zigzag_frame_list = []
        self.zigzag_stre_list = []
        # ego-path curvature caches filled by _comf_param_cal
        self.cur_ego_path_list = []
        self.curvature_list = []
        self._get_data()
        self._comf_param_cal()
    def _get_data(self):
        """Slice the comfort-relevant columns of the ego data.

        ``self.ego_df`` keeps the original csv index; ``self.df`` is the
        same data with a fresh 0-based positional index.
        """
        self.ego_df = self.data[config.COMFORT_INFO].copy()
        self.df = self.ego_df.reset_index(drop=True)  # ego_df keeps the original csv index
  109. def _cal_cur_ego_path(self, row):
  110. try:
  111. divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
  112. if not divide:
  113. res = None
  114. else:
  115. res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
  116. except:
  117. res = None
  118. return res
    def _comf_param_cal(self):
        """Derive per-frame comfort flags and curvature columns on ego_df.

        Adds speed-dependent acceleration envelopes (ip_acc/ip_dec),
        slam-brake/slam-accel flags, per-frame cadence labels, and the
        trajectory-vs-lane curvature columns used by the shake detector.
        """
        # speed-dependent envelope, linearly interpolated between the two
        # anchor points; units of 'v' (18 vs 72) not visible here — TODO confirm
        self.ego_df['ip_acc'] = self.ego_df['v'].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
        self.ego_df['ip_dec'] = self.ego_df['v'].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
        # flag frames decelerating harder than the envelope allows
        self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
            lambda x: 1 if x < 0 else 0)
        # flag frames accelerating harder than the envelope allows
        self.ego_df['slam_accel'] = (self.ego_df['lon_acc'] - self.ego_df['ip_acc']).apply(
            lambda x: 1 if x > 0 else 0)
        # per-frame cadence label: nan / 1 / -1 / 0 (see _cadence_process_new)
        self.ego_df['cadence'] = self.ego_df.apply(
            lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
        # columns for the shake detector: ego-path curvature vs lane curvature
        self.ego_df['cur_ego_path'] = self.ego_df.apply(self._cal_cur_ego_path, axis=1)
        self.ego_df['curvHor'] = self.ego_df['curvHor'].astype('float')
        self.ego_df['cur_diff'] = (self.ego_df['cur_ego_path'] - self.ego_df['curvHor']).abs()
        # turning radii; 10000 stands in for "straight" (zero curvature)
        self.ego_df['R'] = self.ego_df['curvHor'].apply(lambda x: 10000 if x == 0 else 1 / x)
        self.ego_df['R_ego'] = self.ego_df['cur_ego_path'].apply(lambda x: 10000 if x == 0 else 1 / x)
        self.ego_df['R_diff'] = (self.ego_df['R_ego'] - self.ego_df['R']).abs()
        self.cur_ego_path_list = self.ego_df['cur_ego_path'].values.tolist()
        self.curvature_list = self.ego_df['curvHor'].values.tolist()
  140. def _peak_valley_determination(self, df):
  141. """
  142. Determine the peak and valley of the vehicle based on its current angular velocity.
  143. Parameters:
  144. df: Dataframe containing the vehicle angular velocity.
  145. Returns:
  146. peak_valley: List of indices representing peaks and valleys.
  147. """
  148. peaks, _ = scipy.signal.find_peaks(df['speedH'], height=0.01, distance=1, prominence=0.01)
  149. valleys, _ = scipy.signal.find_peaks(-df['speedH'], height=0.01, distance=1, prominence=0.01)
  150. peak_valley = sorted(list(peaks) + list(valleys))
  151. return peak_valley
  152. def _peak_valley_judgment(self, p_last, p_curr, tw=10000, avg=0.02):
  153. """
  154. Determine if the given peaks and valleys satisfy certain conditions.
  155. Parameters:
  156. p_last: Previous peak or valley data point.
  157. p_curr: Current peak or valley data point.
  158. tw: Threshold time difference between peaks and valleys.
  159. avg: Angular velocity gap threshold.
  160. Returns:
  161. Boolean indicating whether the conditions are satisfied.
  162. """
  163. t_diff = p_curr[0] - p_last[0]
  164. v_diff = abs(p_curr[1] - p_last[1])
  165. s = p_curr[1] * p_last[1]
  166. zigzag_flag = t_diff < tw and v_diff > avg and s < 0
  167. if zigzag_flag and ([p_last[0], p_curr[0]] not in self.zigzag_time_list):
  168. self.zigzag_time_list.append([p_last[0], p_curr[0]])
  169. return zigzag_flag
  170. @peak_valley_decorator
  171. def zigzag_count_func(self, p_curr, p_last, flag=True):
  172. """
  173. Count the number of zigzag movements.
  174. Parameters:
  175. df: Input dataframe data.
  176. Returns:
  177. zigzag_count: Number of zigzag movements.
  178. """
  179. if flag:
  180. self.zigzag_count += 1
  181. else:
  182. self.zigzag_count += 0
  183. @peak_valley_decorator
  184. def cal_zigzag_strength_strength(self, p_curr, p_last, flag=True):
  185. """
  186. Calculate various strength statistics.
  187. Returns:
  188. Tuple containing maximum strength, minimum strength,
  189. average strength, and 99th percentile strength.
  190. """
  191. if flag:
  192. v_diff = abs(p_curr[1] - p_last[1])
  193. t_diff = p_curr[0] - p_last[0]
  194. self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
  195. else:
  196. self.zigzag_stre_list = []
  197. def _shake_detector(self, Cr_diff=0.05, T_diff=0.39):
  198. """
  199. ego车横向加速度ax;
  200. ego车轨迹横向曲率;
  201. ego车轨迹曲率变化率;
  202. ego车所在车lane曲率;
  203. ego车所在车lane曲率变化率;
  204. 转向灯(暂时存疑,可不用)Cr_diff = 0.1, T_diff = 0.04
  205. 求解曲率公式k(t) = (x'(t) * y''(t) - y'(t) * x''(t)) / ((x'(t))^2 + (y'(t))^2)^(3/2)
  206. """
  207. time_list = []
  208. frame_list = []
  209. shake_time_list = []
  210. df = self.ego_df.copy()
  211. df = df[df['cur_diff'] > Cr_diff]
  212. df['frame_ID_diff'] = df['simFrame'].diff() # 找出行车轨迹曲率与道路曲率之差大于阈值的数据段
  213. filtered_df = df[df.frame_ID_diff > T_diff] # 此处是用大间隔区分多次晃动情景 。
  214. row_numbers = filtered_df.index.tolist()
  215. cut_column = pd.cut(df.index, bins=row_numbers)
  216. grouped = df.groupby(cut_column)
  217. dfs = {}
  218. for name, group in grouped:
  219. dfs[name] = group.reset_index(drop=True)
  220. for name, df_group in dfs.items():
  221. # 直道,未主动换道
  222. df_group['curvHor'] = df_group['curvHor'].abs()
  223. df_group_straight = df_group[(df_group.lightMask == 0) & (df_group.curvHor < 0.001)]
  224. if not df_group_straight.empty:
  225. tmp_list = df_group_straight['simTime'].values
  226. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  227. time_list.extend(df_group_straight['simTime'].values)
  228. frame_list.extend(df_group_straight['simFrame'].values)
  229. self.shake_count = self.shake_count + 1
  230. # 打转向灯,道路为直道,此时晃动判断标准车辆曲率变化率为一个更大的阈值
  231. df_group_change_lane = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'] < 0.001)]
  232. df_group_change_lane_data = df_group_change_lane[df_group_change_lane.cur_diff > Cr_diff + 0.2]
  233. if not df_group_change_lane_data.empty:
  234. tmp_list = df_group_change_lane_data['simTime'].values
  235. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  236. time_list.extend(df_group_change_lane_data['simTime'].values)
  237. frame_list.extend(df_group_change_lane_data['simFrame'].values)
  238. self.shake_count = self.shake_count + 1
  239. # 转弯,打转向灯
  240. df_group_turn = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'].abs() > 0.001)]
  241. df_group_turn_data = df_group_turn[df_group_turn.cur_diff.abs() > Cr_diff + 0.1]
  242. if not df_group_turn_data.empty:
  243. tmp_list = df_group_turn_data['simTime'].values
  244. # shake_time_list.append([tmp_list[0], tmp_list[-1]])
  245. time_list.extend(df_group_turn_data['simTime'].values)
  246. frame_list.extend(df_group_turn_data['simFrame'].values)
  247. self.shake_count = self.shake_count + 1
  248. TIME_RANGE = 1
  249. t_list = time_list
  250. f_list = frame_list
  251. group_time = []
  252. group_frame = []
  253. sub_group_time = []
  254. sub_group_frame = []
  255. for i in range(len(f_list)):
  256. if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE:
  257. sub_group_time.append(t_list[i])
  258. sub_group_frame.append(f_list[i])
  259. else:
  260. group_time.append(sub_group_time)
  261. group_frame.append(sub_group_frame)
  262. sub_group_time = [t_list[i]]
  263. sub_group_frame = [f_list[i]]
  264. # 输出图表值
  265. shake_time = [[g[0], g[-1]] for g in group_time]
  266. shake_frame = [[g[0], g[-1]] for g in group_frame]
  267. self.shake_count = len(shake_time)
  268. if shake_time:
  269. time_df = pd.DataFrame(shake_time, columns=['start_time', 'end_time'])
  270. frame_df = pd.DataFrame(shake_frame, columns=['start_frame', 'end_frame'])
  271. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  272. discomfort_df['type'] = 'shake'
  273. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  274. return time_list
  275. def _cadence_process(self, lon_acc_roc, ip_dec_roc):
  276. if abs(lon_acc_roc) >= abs(ip_dec_roc) or abs(lon_acc_roc) < 1:
  277. return np.nan
  278. # elif abs(lon_acc_roc) == 0:
  279. elif abs(lon_acc_roc) == 0:
  280. return 0
  281. elif lon_acc_roc > 0 and lon_acc_roc < -ip_dec_roc:
  282. return 1
  283. elif lon_acc_roc < 0 and lon_acc_roc > ip_dec_roc:
  284. return -1
  285. def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
  286. if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
  287. return np.nan
  288. # elif abs(lon_acc_roc) == 0:
  289. elif abs(lon_acc) == 0:
  290. return 0
  291. elif lon_acc > 0 and lon_acc < ip_acc:
  292. return 1
  293. elif lon_acc < 0 and lon_acc > ip_dec:
  294. return -1
  295. else:
  296. return 0
  297. def _cadence_detector(self):
  298. """
  299. # 加速度突变:先加后减,先减后加,先加然后停,先减然后停
  300. # 顿挫:2s内多次加速度变化率突变
  301. # 求出每一个特征点,然后提取,然后将每一个特征点后面的2s做一个窗口,统计频率,避免无效运算
  302. # 将特征点筛选出来
  303. # 将特征点时间作为聚类标准,大于1s的pass,小于等于1s的聚类到一个分组
  304. # 去掉小于3个特征点的分组
  305. """
  306. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'cadence']].copy()
  307. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence']].copy()
  308. time_list = data['simTime'].values.tolist()
  309. data = data[data['cadence'] != np.nan]
  310. data['cadence_diff'] = data['cadence'].diff()
  311. data.dropna(subset='cadence_diff', inplace=True)
  312. data = data[data['cadence_diff'] != 0]
  313. t_list = data['simTime'].values.tolist()
  314. f_list = data['simFrame'].values.tolist()
  315. TIME_RANGE = 1
  316. group_time = []
  317. group_frame = []
  318. sub_group_time = []
  319. sub_group_frame = []
  320. for i in range(len(f_list)):
  321. if not sub_group_time or t_list[i] - t_list[i - 1] <= TIME_RANGE: # 特征点相邻一秒内的,算作同一组顿挫
  322. sub_group_time.append(t_list[i])
  323. sub_group_frame.append(f_list[i])
  324. else:
  325. group_time.append(sub_group_time)
  326. group_frame.append(sub_group_frame)
  327. sub_group_time = [t_list[i]]
  328. sub_group_frame = [f_list[i]]
  329. group_time.append(sub_group_time)
  330. group_frame.append(sub_group_frame)
  331. group_time = [g for g in group_time if len(g) >= 1] # 有一次特征点则算作一次顿挫
  332. group_frame = [g for g in group_frame if len(g) >= 1]
  333. # 将顿挫组的起始时间为组重新统计时间
  334. # 输出图表值
  335. cadence_time = [[g[0], g[-1]] for g in group_time]
  336. cadence_frame = [[g[0], g[-1]] for g in group_frame]
  337. if cadence_time:
  338. time_df = pd.DataFrame(cadence_time, columns=['start_time', 'end_time'])
  339. frame_df = pd.DataFrame(cadence_frame, columns=['start_frame', 'end_frame'])
  340. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  341. discomfort_df['type'] = 'cadence'
  342. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  343. # 将顿挫组的起始时间为组重新统计时间
  344. cadence_time_list = [time for pair in cadence_time for time in time_list if pair[0] <= time <= pair[1]]
  345. # time_list = [element for sublist in group_time for element in sublist]
  346. # merged_list = [element for sublist in res_group for element in sublist]
  347. # res_df = data[data['simTime'].isin(merged_list)]
  348. stre_list = []
  349. freq_list = []
  350. for g in group_time:
  351. # calculate strength
  352. g_df = data[data['simTime'].isin(g)]
  353. strength = g_df['lon_acc'].abs().mean()
  354. stre_list.append(strength)
  355. # calculate frequency
  356. cnt = len(g)
  357. t_start = g_df['simTime'].iloc[0]
  358. t_end = g_df['simTime'].iloc[-1]
  359. t_delta = t_end - t_start
  360. frequency = cnt / t_delta
  361. freq_list.append(frequency)
  362. self.cadence_count = len(freq_list)
  363. cadence_stre = sum(stre_list) / len(stre_list) if stre_list else 0
  364. return cadence_time_list
  365. def _slam_brake_detector(self):
  366. # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
  367. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_dec_roc', 'slam_brake']].copy()
  368. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake']].copy()
  369. # data['slam_diff'] = data['slam_brake'].diff()
  370. # res_df = data[data['slam_diff'] == 1]
  371. res_df = data[data['slam_brake'] == 1]
  372. t_list = res_df['simTime'].values
  373. f_list = res_df['simFrame'].values.tolist()
  374. TIME_RANGE = 1
  375. group_time = []
  376. group_frame = []
  377. sub_group_time = []
  378. sub_group_frame = []
  379. for i in range(len(f_list)):
  380. if not sub_group_time or f_list[i] - f_list[i - 1] <= TIME_RANGE: # 连续帧的算作同一组急刹
  381. sub_group_time.append(t_list[i])
  382. sub_group_frame.append(f_list[i])
  383. else:
  384. group_time.append(sub_group_time)
  385. group_frame.append(sub_group_frame)
  386. sub_group_time = [t_list[i]]
  387. sub_group_frame = [f_list[i]]
  388. group_time.append(sub_group_time)
  389. group_frame.append(sub_group_frame)
  390. group_time = [g for g in group_time if len(g) >= 2] # 达到两帧算作一次急刹
  391. group_frame = [g for g in group_frame if len(g) >= 2]
  392. # 输出图表值
  393. slam_brake_time = [[g[0], g[-1]] for g in group_time]
  394. slam_brake_frame = [[g[0], g[-1]] for g in group_frame]
  395. if slam_brake_time:
  396. time_df = pd.DataFrame(slam_brake_time, columns=['start_time', 'end_time'])
  397. frame_df = pd.DataFrame(slam_brake_frame, columns=['start_frame', 'end_frame'])
  398. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  399. discomfort_df['type'] = 'slam_brake'
  400. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  401. time_list = [element for sublist in group_time for element in sublist]
  402. self.slam_brake_count = len(group_time) # / self.mileage # * 1000000
  403. return time_list
  404. def _slam_accel_detector(self):
  405. # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
  406. # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_acc_roc', 'slam_accel']].copy()
  407. data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel']].copy()
  408. # data['slam_diff'] = data['slam_accel'].diff()
  409. # res_df = data.loc[data['slam_diff'] == 1]
  410. res_df = data.loc[data['slam_accel'] == 1]
  411. t_list = res_df['simTime'].values
  412. f_list = res_df['simFrame'].values.tolist()
  413. group_time = []
  414. group_frame = []
  415. sub_group_time = []
  416. sub_group_frame = []
  417. for i in range(len(f_list)):
  418. if not group_time or f_list[i] - f_list[i - 1] <= 1: # 连续帧的算作同一组急加速
  419. sub_group_time.append(t_list[i])
  420. sub_group_frame.append(f_list[i])
  421. else:
  422. group_time.append(sub_group_time)
  423. group_frame.append(sub_group_frame)
  424. sub_group_time = [t_list[i]]
  425. sub_group_frame = [f_list[i]]
  426. group_time.append(sub_group_time)
  427. group_frame.append(sub_group_frame)
  428. group_time = [g for g in group_time if len(g) >= 2]
  429. group_frame = [g for g in group_frame if len(g) >= 2]
  430. # 输出图表值
  431. slam_accel_time = [[g[0], g[-1]] for g in group_time]
  432. slam_accel_frame = [[g[0], g[-1]] for g in group_frame]
  433. if slam_accel_time:
  434. time_df = pd.DataFrame(slam_accel_time, columns=['start_time', 'end_time'])
  435. frame_df = pd.DataFrame(slam_accel_frame, columns=['start_frame', 'end_frame'])
  436. discomfort_df = pd.concat([time_df, frame_df], axis=1)
  437. discomfort_df['type'] = 'slam_accel'
  438. self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
  439. time_list = [element for sublist in group_time for element in sublist]
  440. self.slam_accel_count = len(group_time) # / self.mileage # * 1000000
  441. return time_list
    def comf_statistic(self):
        """Run all discomfort detectors and collect per-metric counts.

        Returns:
            dict: event counts keyed by metric name.
        """
        df = self.ego_df[['simTime', 'cur_diff', 'lon_acc', 'lon_acc_roc', 'accelH']].copy()
        # zigzag (weaving): both decorated calls walk the same extrema pairs
        self.zigzag_count_func()
        self.cal_zigzag_strength_strength()
        if self.zigzag_time_list:
            zigzag_df = pd.DataFrame(self.zigzag_time_list, columns=['start_time', 'end_time'])
            zigzag_df = get_frame_with_time(zigzag_df, self.ego_df)
            zigzag_df['type'] = 'zigzag'
            self.discomfort_df = pd.concat([self.discomfort_df, zigzag_df], ignore_index=True)
        zigzag_t_list = []
        # zigzag_time_list holds [t_start, t_end] pairs only; expand each
        # pair to the full list of covered timestamps
        t_list = df['simTime'].values.tolist()
        for t_start, t_end in self.zigzag_time_list:
            index_1 = t_list.index(t_start)
            index_2 = t_list.index(t_end)
            zigzag_t_list.extend(t_list[index_1:index_2 + 1])
        zigzag_t_list = list(set(zigzag_t_list))
        shake_t_list = self._shake_detector()
        cadence_t_list = self._cadence_detector()
        slam_brake_t_list = self._slam_brake_detector()
        slam_accel_t_list = self._slam_accel_detector()
        discomfort_time_list = zigzag_t_list + shake_t_list + cadence_t_list + slam_brake_t_list + slam_accel_t_list
        discomfort_time_list = sorted(discomfort_time_list)  # sort
        # NOTE(review): set() after sorted() discards the ordering; harmless
        # here because only len() is used below, but confirm before reuse.
        discomfort_time_list = list(set(discomfort_time_list))  # deduplicate
        # assumes a fixed 100 Hz frame rate — TODO confirm against the data
        FREQUENCY = 100
        TIME_DIFF = 1 / FREQUENCY
        self.discomfort_duration = len(discomfort_time_list) * TIME_DIFF
        # per-frame event flags (kept for charting/debugging)
        df['flag_zigzag'] = df['simTime'].apply(lambda x: 1 if x in zigzag_t_list else 0)
        df['flag_shake'] = df['simTime'].apply(lambda x: 1 if x in shake_t_list else 0)
        df['flag_cadence'] = df['simTime'].apply(lambda x: 1 if x in cadence_t_list else 0)
        df['flag_slam_brake'] = df['simTime'].apply(lambda x: 1 if x in slam_brake_t_list else 0)
        df['flag_slam_accel'] = df['simTime'].apply(lambda x: 1 if x in slam_accel_t_list else 0)
        # NOTE(review): key is lowercase 'weaving' here but 'Weaving' in
        # __init__ — confirm which spelling the score consumer expects.
        self.calculated_value = {
            "weaving": self.zigzag_count,
            "shake": self.shake_count,
            "cadence": self.cadence_count,
            "slamBrake": self.slam_brake_count,
            "slamAccelerate": self.slam_accel_count
        }
        return self.calculated_value
  486. def _nan_detect(self, num):
  487. if math.isnan(num):
  488. return 0
  489. return num
  490. def zip_time_pairs(self, zip_list):
  491. zip_time_pairs = zip(self.time_list, zip_list)
  492. zip_vs_time = [[x, "" if math.isnan(y) else y] for x, y in zip_time_pairs]
  493. return zip_vs_time
    def report_statistic(self):
        """Compute the comfort metrics and score them.

        Returns:
            The evaluator output produced by Score over the per-metric
            event counts from ``comf_statistic``.
        """
        comfort_result = self.comf_statistic()
        evaluator = Score(config.COMFORT_CONFIG_PATH)
        result = evaluator.evaluate(comfort_result)
        print(f'Comfort Result:{self.calculated_value}')
        return result
if __name__ == '__main__':
    # ad-hoc smoke test: run the comfort evaluation on one hard-coded case
    case_name = 'ICA'
    mode_label = 'PGVIL'
    data = data_process.DataPreprocessing(case_name, mode_label)
    comfort_instance = Comfort(data)
    # report_statistic takes no arguments besides self
    try:
        comfort_result = comfort_instance.report_statistic()
        result = {'comfort': comfort_result}
        print(result)
    except Exception as e:
        print(f"An error occurred in Comfort.report_statistic: {e}")