comfort.py 53 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310
  1. #!/usr/bin/env python
  2. # -*- coding: utf-8 -*-
  3. ##################################################################
  4. #
  5. # Copyright (c) 2023 CICV, Inc. All Rights Reserved
  6. #
  7. ##################################################################
  8. """
  9. @Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
  10. @Data: 2023/06/25
  11. @Last Modified: 2025/04/25
  12. @Summary: Comfort metrics
  13. """
  14. import sys
  15. import math
  16. import scipy.signal
  17. import pandas as pd
  18. import numpy as np
  19. from pathlib import Path
  20. from typing import Dict, List, Any, Optional, Callable, Union, Tuple
  21. from modules.lib.score import Score
  22. from modules.lib.common import get_interpolation, get_frame_with_time
  23. from modules.lib import data_process
  24. from modules.lib.log_manager import LogManager
# Columns pulled from the processed ego data for comfort-metric computation
# (posH, the heading angle, was added for ENU -> body-frame conversion).
COMFORT_INFO = [
    "simTime",
    "simFrame",
    "speedX",
    "speedY",
    "accelX",
    "accelY",
    "curvHor",
    "lightMask",
    "v",
    "lat_acc",
    "lon_acc",
    "time_diff",
    "lon_acc_diff",
    "lon_acc_roc",
    "speedH",
    "accelH",
    "posH",  # heading angle, consumed via np.radians() in the frame conversions
]
  45. # ----------------------
  46. # 独立指标计算函数
  47. # ----------------------
  48. def motionComfortIndex(data_processed) -> dict:
  49. """计算运动舒适度指数"""
  50. comfort = Comfort(data_processed)
  51. index = comfort.calculate_motion_comfort_index()
  52. return {"motionComfortIndex": float(index)}
  53. def rideQualityScore(data_processed) -> dict:
  54. """计算乘坐质量评分"""
  55. comfort = Comfort(data_processed)
  56. score = comfort.calculate_ride_quality_score()
  57. return {"rideQualityScore": float(score)}
  58. def calculate_motionsickness(data_processed) -> dict:
  59. """计算晕车概率指标"""
  60. comfort = ComfortCalculator(data_processed)
  61. motion_sickness_prob = comfort.calculate_motion_sickness_probability()
  62. return {"motionSickness": float(motion_sickness_prob)}
  63. def calculate_vdv(data_processed) -> dict:
  64. """计算振动剂量值(Vibration Dose Value, VDV)指标"""
  65. comfort = ComfortCalculator(data_processed)
  66. vdv_value = comfort.calculate_vdv()
  67. return {"vdv": float(vdv_value)}
  68. def calculate_ava_vav(data_processed) -> dict:
  69. """计算多维度综合加权加速度"""
  70. comfort = ComfortCalculator(data_processed)
  71. ava_vav_value = comfort.calculate_ava_vav()
  72. return {"ava_vav": float(ava_vav_value)}
  73. def calculate_msdv(data_processed) -> dict:
  74. """计算晕动剂量值(MSDV)指标"""
  75. comfort = ComfortCalculator(data_processed)
  76. msdv_value = comfort.calculate_msdv()
  77. return {"msdv": float(msdv_value)}
  78. def calculate_weaving(data_processed) -> dict:
  79. """计算蛇行指标"""
  80. comfort = ComfortCalculator(data_processed)
  81. zigzag_count = comfort.calculate_zigzag_count()
  82. return {"weaving": float(zigzag_count)}
  83. def calculate_shake(data_processed) -> dict:
  84. """计算晃动指标"""
  85. comfort = ComfortCalculator(data_processed)
  86. shake_count = comfort.calculate_shake_count()
  87. return {"shake": float(shake_count)}
  88. def calculate_cadence(data_processed) -> dict:
  89. """计算顿挫指标"""
  90. comfort = ComfortCalculator(data_processed)
  91. cadence_count = comfort.calculate_cadence_count()
  92. return {"cadence": float(cadence_count)}
  93. def calculate_slambrake(data_processed) -> dict:
  94. """计算急刹车指标"""
  95. comfort = ComfortCalculator(data_processed)
  96. slam_brake_count = comfort.calculate_slam_brake_count()
  97. return {"slamBrake": float(slam_brake_count)}
  98. def calculate_slamaccelerate(data_processed) -> dict:
  99. """计算急加速指标"""
  100. comfort = ComfortCalculator(data_processed)
  101. slam_accel_count = comfort.calculate_slam_accel_count()
  102. return {"slamAccelerate": float(slam_accel_count)}
# Decorator kept unchanged: drives a metric method over successive
# peak/valley pairs of the yaw-rate signal.
def peak_valley_decorator(method):
    """Invoke `method` once per qualifying peak/valley pair of self.df['speedH'].

    When no peaks/valleys exist at all, `method` is invoked a single time
    with flag=False and dummy [0, 0] points so counters stay defined.

    NOTE(review): the wrapper returns the undecorated `method` object, not
    the wrapper's own result, and `functools.wraps` is not applied — callers
    appear to use the decorated methods for their side effects only; verify
    before changing.
    """
    def wrapper(self, *args, **kwargs):
        peak_valley = self._peak_valley_determination(self.df)
        pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
        if len(pv_list) != 0:
            flag = True
            p_last = pv_list[0]
            for i in range(1, len(pv_list)):
                p_curr = pv_list[i]
                if self._peak_valley_judgment(p_last, p_curr):
                    # method(self, p_curr, p_last)
                    # NOTE(review): p_last is NOT advanced after a matched
                    # pair — subsequent points keep being compared against
                    # the same anchor; presumably intentional, confirm.
                    method(self, p_curr, p_last, flag, *args, **kwargs)
                else:
                    p_last = p_curr
            return method
        else:
            flag = False
            p_curr = [0, 0]
            p_last = [0, 0]
            method(self, p_curr, p_last, flag, *args, **kwargs)
            return method
    return wrapper
  126. class ComfortRegistry:
  127. """舒适性指标注册器"""
  128. def __init__(self, data_processed):
  129. self.logger = LogManager().get_logger() # 获取全局日志实例
  130. self.data = data_processed
  131. self.comfort_config = data_processed.comfort_config["comfort"]
  132. self.metrics = self._extract_metrics(self.comfort_config)
  133. self._registry = self._build_registry()
  134. def _extract_metrics(self, config_node: dict) -> list:
  135. """DFS遍历提取指标"""
  136. metrics = []
  137. def _recurse(node):
  138. if isinstance(node, dict):
  139. if 'name' in node and not any(isinstance(v, dict) for v in node.values()):
  140. metrics.append(node['name'])
  141. for v in node.values():
  142. _recurse(v)
  143. _recurse(config_node)
  144. self.logger.info(f'评比的舒适性指标列表:{metrics}')
  145. return metrics
  146. def _build_registry(self) -> dict:
  147. """自动注册指标函数"""
  148. registry = {}
  149. for metric_name in self.metrics:
  150. func_name = f"calculate_{metric_name.lower()}"
  151. try:
  152. registry[metric_name] = globals()[func_name]
  153. except KeyError:
  154. self.logger.error(f"未实现指标函数: {func_name}")
  155. return registry
  156. def batch_execute(self) -> dict:
  157. """批量执行指标计算"""
  158. results = {}
  159. for name, func in self._registry.items():
  160. try:
  161. result = func(self.data)
  162. results.update(result)
  163. # 新增:将每个指标的结果写入日志
  164. self.logger.info(f'舒适性指标[{name}]计算结果: {result}')
  165. except Exception as e:
  166. self.logger.error(f"{name} 执行失败: {str(e)}", exc_info=True)
  167. results[name] = None
  168. self.logger.info(f'舒适性指标计算结果:{results}')
  169. return results
  170. class ComfortCalculator:
  171. """舒适性指标计算类 - 提供核心计算功能"""
  172. def __init__(self, data_processed):
  173. self.data_processed = data_processed
  174. self.logger = LogManager().get_logger()
  175. self.data = data_processed.ego_data
  176. self.ego_df = pd.DataFrame()
  177. self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
  178. # 统计指标
  179. self.calculated_value = {
  180. 'weaving': 0,
  181. 'shake': 0,
  182. 'cadence': 0,
  183. 'slamBrake': 0,
  184. 'slamAccelerate': 0,
  185. 'ava_vav': 0, # 添加新指标的默认值
  186. 'msdv': 0, # 添加MSDV指标的默认值
  187. 'motionSickness': 0, # 添加晕车概率指标的默认值
  188. 'vdt:': 0,
  189. 'motionComfortIndex': 0, # 新增指标
  190. 'rideQualityScore': 0 # 新增指标
  191. }
  192. self.time_list = self.data['simTime'].values.tolist()
  193. self.frame_list = self.data['simFrame'].values.tolist()
  194. self.zigzag_count = 0
  195. self.shake_count = 0
  196. self.cadence_count = 0
  197. self.slam_brake_count = 0
  198. self.slam_accel_count = 0
  199. self.zigzag_time_list = []
  200. self.zigzag_stre_list = []
  201. self._initialize_data()
  202. def _initialize_data(self):
  203. """初始化数据"""
  204. self.ego_df = self.data[COMFORT_INFO].copy()
  205. self.df = self.ego_df.reset_index(drop=True)
  206. self._prepare_comfort_parameters()
  207. def _prepare_comfort_parameters(self):
  208. """准备舒适性计算所需参数"""
  209. # 计算加减速阈值
  210. self.ego_df['ip_acc'] = self.ego_df['v'].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
  211. self.ego_df['ip_dec'] = self.ego_df['v'].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
  212. self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
  213. lambda x: 1 if x < 0 else 0)
  214. self.ego_df['slam_accel'] = (self.ego_df['lon_acc'] - self.ego_df['ip_acc']).apply(
  215. lambda x: 1 if x > 0 else 0)
  216. self.ego_df['cadence'] = self.ego_df.apply(
  217. lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
    def _apply_frequency_weighting(self, acceleration_data, weighting_type='Wk', fs=100):
        """Apply an ISO 2631-1:1997 frequency-weighting filter chain.

        Args:
            acceleration_data: pd.Series of acceleration samples.
            weighting_type: weighting curve to apply:
                - 'Wk': vertical direction (Z axis)
                - 'Wd': horizontal directions (X and Y axes)
                - 'Wf': motion-sickness related weighting
            fs: sampling frequency in Hz.

        Returns:
            pd.Series of weighted samples on the input's index; the input is
            returned unchanged when it is empty/all-NaN or the weighting type
            is unknown.
        """
        # Nothing to filter on empty or all-NaN input.
        if acceleration_data.empty or acceleration_data.isna().all():
            return acceleration_data
        # Filter parameters taken from the ISO 2631-1:1997 document, used to
        # construct the digital filters below.
        # NOTE(review): coefficient values should be cross-checked against
        # the standard before relying on absolute magnitudes.
        if weighting_type == 'Wk':  # vertical (Z axis)
            f1 = 0.4
            f2 = 100.0
            f3 = 12.5
            f4 = 12.5
            Q1 = 0.63
            Q2 = 0.5
            Q3 = 0.63
            Q4 = 0.63
            K = 0.4
        elif weighting_type == 'Wd':  # horizontal (X and Y axes)
            f1 = 0.4
            f2 = 100.0
            f3 = 2.0
            f4 = 2.0
            Q1 = 0.63
            Q2 = 0.5
            Q3 = 0.63
            Q4 = 0.63
            K = 0.4
        elif weighting_type == 'Wf':  # motion-sickness weighting
            f1 = 0.08
            f2 = 0.63
            f3 = 0.25
            f4 = 0.8
            Q1 = 0.63
            Q2 = 0.86
            Q3 = 0.8
            Q4 = 0.8
            K = 1.0
        else:
            self.logger.warning(f"未知的加权类型: {weighting_type},使用原始数据")
            return acceleration_data
        # Convert corner frequencies to angular frequencies.
        w1 = 2 * np.pi * f1
        w2 = 2 * np.pi * f2
        w3 = 2 * np.pi * f3
        w4 = 2 * np.pi * f4
        # High-pass section (s domain).
        b1 = [K * w1**2, 0]
        a1 = [1, w1/Q1, w1**2]
        # Low-pass section (s domain).
        b2 = [K, 0, 0]
        a2 = [1, w2/Q2, w2**2]
        # Acceleration-velocity transition section (s domain).
        b3 = [K, 0]
        a3 = [1, w3/Q3, w3**2]
        # Upward-step section (s domain).
        b4 = [K, 0, 0]
        a4 = [1, w4/Q4, w4**2]
        # Bilinear transform: map each s-domain section to the z domain at fs.
        b1_z, a1_z = scipy.signal.bilinear(b1, a1, fs)
        b2_z, a2_z = scipy.signal.bilinear(b2, a2, fs)
        b3_z, a3_z = scipy.signal.bilinear(b3, a3, fs)
        b4_z, a4_z = scipy.signal.bilinear(b4, a4, fs)
        # Apply the four sections in cascade.
        data_np = acceleration_data.to_numpy()
        filtered_data = scipy.signal.lfilter(b1_z, a1_z, data_np)
        filtered_data = scipy.signal.lfilter(b2_z, a2_z, filtered_data)
        filtered_data = scipy.signal.lfilter(b3_z, a3_z, filtered_data)
        filtered_data = scipy.signal.lfilter(b4_z, a4_z, filtered_data)
        return pd.Series(filtered_data, index=acceleration_data.index)
  300. def calculate_vdv(self):
  301. """计算振动剂量值(Vibration Dose Value, VDV)
  302. VDV更强调"冲击"或"突发"振动事件对整体舒适度的影响,
  303. 常用于评估包含较多瞬态冲击或颠簸的振动信号。
  304. 计算公式: VDV = (∫[0,T] |a_ω(t)|⁴ dt)^(1/4)
  305. 相较于MSDV的二次累积,VDV的四次累积使其对高幅值短时冲击更为敏感,
  306. 能够更准确地反映剧烈颠簸对乘员舒适度的不利影响。
  307. Returns:
  308. float: 振动剂量值
  309. """
  310. # 获取数据
  311. df = self.ego_df.copy()
  312. # 确保有必要的列
  313. if 'accelX' not in df.columns or 'accelY' not in df.columns:
  314. self.logger.warning("缺少计算振动剂量值所需的数据列")
  315. return self.calculated_value['vdv']
  316. # 将东北天坐标系下的加速度转换为车身坐标系下的加速度
  317. if 'posH' not in df.columns:
  318. self.logger.warning("缺少航向角数据,无法进行坐标转换")
  319. return self.calculated_value['vdv']
  320. # 车身坐标系:X轴指向车头,Y轴指向车辆左侧,Z轴指向车顶
  321. df['posH_rad'] = np.radians(df['posH'])
  322. # 转换加速度到车身坐标系
  323. df['a_x_body'] = df['accelX'] * np.sin(df['posH_rad']) + df['accelY'] * np.cos(df['posH_rad'])
  324. df['a_y_body'] = df['accelX'] * np.cos(df['posH_rad']) - df['accelY'] * np.sin(df['posH_rad'])
  325. # Z方向加速度,如果没有则假设为0
  326. df['a_z_body'] = df['accelZ'] if 'accelZ' in df.columns else pd.Series(np.zeros(len(df)))
  327. # 计算时间差
  328. df['time_diff'] = df['simTime'].diff().fillna(0)
  329. # 应用ISO 2631-1:1997标准的频率加权滤波
  330. # 估计采样频率 - 假设数据是均匀采样的
  331. if len(df) > 1:
  332. time_diff = df['simTime'].diff().median()
  333. fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
  334. else:
  335. fs = 100 # 默认采样频率
  336. # 对各方向加速度应用适当的频率加权
  337. # 对于VDV评估,使用与MSDV相同的加权滤波器
  338. a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wd', fs) # 水平方向使用Wd
  339. a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wd', fs) # 水平方向使用Wd
  340. a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wk', fs) # 垂直方向使用Wk
  341. # 计算加权均方根值 (r.m.s.)
  342. a_x_rms = np.sqrt(np.mean(a_x_weighted**2))
  343. a_y_rms = np.sqrt(np.mean(a_y_weighted**2))
  344. a_z_rms = np.sqrt(np.mean(a_z_weighted**2))
  345. # 记录r.m.s.值用于参考
  346. self.logger.info(f"X方向加权均方根值: {a_x_rms}")
  347. self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
  348. self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
  349. # 计算VDV - 对加速度四次方进行时间积分,再开四次方根
  350. # 对于X方向(前后方向)
  351. vdv_x = np.power(np.sum(np.power(np.abs(a_x_weighted), 4) * df['time_diff']), 0.25)
  352. # 对于Y方向(左右方向)
  353. vdv_y = np.power(np.sum(np.power(np.abs(a_y_weighted), 4) * df['time_diff']), 0.25)
  354. # 对于Z方向(上下方向)
  355. vdv_z = np.power(np.sum(np.power(np.abs(a_z_weighted), 4) * df['time_diff']), 0.25)
  356. # 综合VDV - 可以使用向量和或加权和
  357. # 根据ISO 2631标准,垂直方向(Z)的权重通常更高
  358. vdv = np.sqrt(vdv_x**2 + vdv_y**2 + (1.4 * vdv_z)**2)
  359. # 记录计算结果
  360. self.calculated_value['vdv'] = vdv
  361. self.logger.info(f"振动剂量值(VDV)计算结果: {vdv}")
  362. self.logger.info(f"X方向VDV: {vdv_x}, Y方向VDV: {vdv_y}, Z方向VDV: {vdv_z}")
  363. return vdv
    def calculate_motion_sickness_probability(self):
        """Motion-sickness probability from acceleration and jerk.

        Maps the combined acceleration and jerk load to a 0-100% range via a
        saturating exponential:

            P = 100 * [1 - exp(-(α*(ax²+ay²+az²) + β*Jerk_RMS) / γ)]

        where ax/ay/az are the body-frame accelerations, Jerk_RMS is the
        r.m.s. of the acceleration derivative, α is the acceleration weight
        (0.1 s⁴/m²), β the jerk weight (0.5 s²/m²), and γ the normalisation
        factor (10 m²/s⁴).

        Returns:
            float: motion-sickness probability in percent (0-100).
        """
        df = self.ego_df.copy()
        # Required columns for the ENU -> body-frame conversion.
        if 'accelX' not in df.columns or 'accelY' not in df.columns:
            self.logger.warning("缺少计算晕车概率所需的数据列")
            return self.calculated_value.get('motionSickness', 0)
        if 'posH' not in df.columns:
            self.logger.warning("缺少航向角数据,无法进行坐标转换")
            return self.calculated_value.get('motionSickness', 0)
        # Body frame: X forward, Y to the left, Z up.
        df['posH_rad'] = np.radians(df['posH'])
        # Project ENU accelerations onto the body axes using the heading.
        df['a_x_body'] = df['accelX'] * np.sin(df['posH_rad']) + df['accelY'] * np.cos(df['posH_rad'])
        df['a_y_body'] = df['accelX'] * np.cos(df['posH_rad']) - df['accelY'] * np.sin(df['posH_rad'])
        # Vertical acceleration; assumed 0 when not recorded.
        df['a_z_body'] = df['accelZ'] if 'accelZ' in df.columns else pd.Series(np.zeros(len(df)))
        # Per-sample time step.
        df['time_diff'] = df['simTime'].diff().fillna(0)
        # Estimate the sampling frequency, assuming uniform sampling.
        if len(df) > 1:
            time_diff = df['simTime'].diff().median()
            fs = 1.0 / time_diff if time_diff > 0 else 100  # default 100 Hz
        else:
            fs = 100  # default sampling frequency
        # Wf weighting (motion-sickness related) on every axis.
        a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wf', fs)
        a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wf', fs)
        a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wf', fs)
        # Jerk as a forward difference of the weighted acceleration
        # (note: a forward difference, not a central difference).
        df['jerk_x'] = a_x_weighted.diff() / df['time_diff']
        df['jerk_y'] = a_y_weighted.diff() / df['time_diff']
        df['jerk_z'] = a_z_weighted.diff() / df['time_diff']
        # The first sample has no predecessor; treat its jerk as 0.
        df[['jerk_x', 'jerk_y', 'jerk_z']] = df[['jerk_x', 'jerk_y', 'jerk_z']].fillna(0)
        # r.m.s. of the jerk magnitude.
        jerk_squared_sum = df['jerk_x']**2 + df['jerk_y']**2 + df['jerk_z']**2
        jerk_rms = np.sqrt(np.mean(jerk_squared_sum))
        # Mean of the squared weighted-acceleration magnitude.
        accel_squared_sum = a_x_weighted**2 + a_y_weighted**2 + a_z_weighted**2
        accel_squared_mean = np.mean(accel_squared_sum)
        # Model parameters (see docstring for units).
        alpha = 0.1   # acceleration weight (s⁴/m²)
        beta = 0.5    # jerk weight (s²/m²)
        gamma = 10.0  # normalisation factor (m²/s⁴)
        # Combine the two load terms and map through the exponential.
        acceleration_term = alpha * accel_squared_mean
        jerk_term = beta * jerk_rms
        score = (acceleration_term + jerk_term) / gamma
        probability = 100 * (1 - np.exp(-score))
        # Clamp to the valid percentage range.
        probability = np.clip(probability, 0, 100)
        self.calculated_value['motionSickness'] = probability
        self.logger.info(f"晕车概率(Motion Sickness Probability)计算结果: {probability:.2f}%")
        self.logger.info(f"加速度平方和均值: {accel_squared_mean:.4f} m²/s⁴, Jerk均方根值: {jerk_rms:.4f} m/s³")
        return probability
    def calculate_motion_comfort_index(self):
        """Motion comfort index from acceleration, jerk, and angular velocity.

        Each component is scored 0-10 with an exponential fall-off beyond its
        threshold, then combined as a weighted average (10 = most comfortable).

        Returns:
            float: motion comfort index in [0, 10].
        """
        df = self.ego_df.copy()
        # Bail out with the stored default when the inputs are missing.
        if 'accelX' not in df.columns or 'accelY' not in df.columns:
            self.logger.warning("缺少计算运动舒适度指数所需的数据列")
            return self.calculated_value.get('motionComfortIndex', 5.0)
        # Resultant acceleration magnitude (Z folded in when available).
        df['accel_magnitude'] = np.sqrt(df['accelX']**2 + df['accelY']**2)
        if 'accelZ' in df.columns:
            df['accel_magnitude'] = np.sqrt(df['accel_magnitude']**2 + df['accelZ']**2)
        # Jerk via per-axis forward differences; 0.01 s fallback step for the
        # first sample avoids division by NaN.
        df['time_diff'] = df['simTime'].diff().fillna(0.01)
        df['jerk_x'] = df['accelX'].diff() / df['time_diff']
        df['jerk_y'] = df['accelY'].diff() / df['time_diff']
        df['jerk_magnitude'] = np.sqrt(df['jerk_x']**2 + df['jerk_y']**2)
        if 'accelZ' in df.columns:
            df['jerk_z'] = df['accelZ'].diff() / df['time_diff']
            df['jerk_magnitude'] = np.sqrt(df['jerk_magnitude']**2 + df['jerk_z']**2)
        # Angular-velocity magnitude; speedH (heading rate) is folded in as
        # the yaw component when present.
        if 'rollRate' in df.columns and 'pitchRate' in df.columns:
            df['angular_velocity'] = np.sqrt(df['rollRate']**2 + df['pitchRate']**2)
            if 'speedH' in df.columns:
                df['angular_velocity'] = np.sqrt(df['angular_velocity']**2 + df['speedH']**2)
        else:
            df['angular_velocity'] = 0
        # r.m.s. of each component over the whole trace.
        accel_rms = np.sqrt(np.mean(df['accel_magnitude']**2))
        jerk_rms = np.sqrt(np.mean(df['jerk_magnitude']**2))
        angular_rms = np.sqrt(np.mean(df['angular_velocity']**2))
        # Comfort starts degrading beyond these thresholds.
        accel_threshold = 2.0    # m/s²
        jerk_threshold = 1.0     # m/s³
        angular_threshold = 0.2  # rad/s
        # Component weights (sum to 1).
        accel_weight = 0.5
        jerk_weight = 0.3
        angular_weight = 0.2
        # Per-component scores: 10 below threshold, exponential decay above.
        accel_score = 10 * np.exp(-max(0, accel_rms - accel_threshold) / accel_threshold)
        jerk_score = 10 * np.exp(-max(0, jerk_rms - jerk_threshold) / jerk_threshold)
        angular_score = 10 * np.exp(-max(0, angular_rms - angular_threshold) / angular_threshold)
        # Weighted average of the three component scores.
        comfort_index = (accel_weight * accel_score +
                         jerk_weight * jerk_score +
                         angular_weight * angular_score)
        # Clamp to the valid range.
        comfort_index = np.clip(comfort_index, 0, 10)
        self.calculated_value['motionComfortIndex'] = comfort_index
        self.logger.info(f"运动舒适度指数(Motion Comfort Index)计算结果: {comfort_index:.2f}/10")
        self.logger.info(f"加速度RMS: {accel_rms:.4f} m/s², 加加速度RMS: {jerk_rms:.4f} m/s³, 角速度RMS: {angular_rms:.4f} rad/s")
        return comfort_index
    def calculate_ride_quality_score(self):
        """Ride quality score from ISO 2631-weighted vertical acceleration.

        The weighted vertical r.m.s. acceleration is banded into a base
        score, then scaled by duration and frequency-content factors.

        Returns:
            float: ride quality score in [0, 100].
        """
        df = self.ego_df.copy()
        # Vertical acceleration is mandatory for this metric.
        if 'accelZ' not in df.columns:
            self.logger.warning("缺少计算乘坐质量评分所需的垂直加速度数据")
            return self.calculated_value.get('rideQualityScore', 70.0)
        # Estimate the sampling frequency, assuming uniform sampling.
        if len(df) > 1:
            time_diff = df['simTime'].diff().median()
            fs = 1.0 / time_diff if time_diff > 0 else 100  # default 100 Hz
        else:
            fs = 100  # default sampling frequency
        # ISO 2631-1:1997 vertical (Wk) frequency weighting.
        a_z_weighted = self._apply_frequency_weighting(df['accelZ'], 'Wk', fs)
        # Weighted vertical r.m.s. acceleration.
        a_z_rms = np.sqrt(np.mean(a_z_weighted**2))
        # ISO 2631-1:1997 comfort bands:
        #   < 0.315 m/s²  - not uncomfortable
        #   0.315-0.63    - a little uncomfortable
        #   0.5-1.0       - fairly uncomfortable
        #   0.8-1.6       - uncomfortable
        #   1.25-2.5      - very uncomfortable
        #   > 2.0         - extremely uncomfortable
        # Band the r.m.s. value into a 0-100 base score.
        if a_z_rms < 0.315:
            base_score = 90
        elif a_z_rms < 0.63:
            base_score = 80
        elif a_z_rms < 1.0:
            base_score = 70
        elif a_z_rms < 1.6:
            base_score = 60
        elif a_z_rms < 2.5:
            base_score = 40
        else:
            base_score = 20
        # Duration factor: traces longer than 10 s are scaled down.
        # NOTE(review): a zero-length time span would divide by zero here —
        # presumably traces always span > 0 s; confirm upstream.
        duration_factor = min(1.0, 10.0 / (df['simTime'].max() - df['simTime'].min()))
        # Frequency factor: penalise energy in the human-sensitive 4-8 Hz
        # band, estimated from the power spectral density.
        if len(a_z_weighted) > 50:  # need enough samples for a spectrum
            f, psd = self._calculate_psd(a_z_weighted, fs)
            sensitive_mask = (f >= 4) & (f <= 8)
            sensitive_energy = np.sum(psd[sensitive_mask])
            total_energy = np.sum(psd)
            frequency_factor = 1.0 - 0.3 * (sensitive_energy / total_energy if total_energy > 0 else 0)
        else:
            frequency_factor = 1.0
        # Final score, clamped to the valid range.
        ride_quality_score = base_score * duration_factor * frequency_factor
        ride_quality_score = np.clip(ride_quality_score, 0, 100)
        self.calculated_value['rideQualityScore'] = ride_quality_score
        self.logger.info(f"乘坐质量评分(Ride Quality Score)计算结果: {ride_quality_score:.2f}/100")
        self.logger.info(f"垂直加速度RMS: {a_z_rms:.4f} m/s²")
        return ride_quality_score
  560. def _calculate_psd(self, signal, fs):
  561. """计算信号的功率谱密度
  562. Args:
  563. signal: 输入信号
  564. fs: 采样频率
  565. Returns:
  566. tuple: 频率和对应的功率谱密度
  567. """
  568. # 使用Welch方法计算PSD
  569. from scipy import signal as sp_signal
  570. f, psd = sp_signal.welch(signal, fs, nperseg=min(256, len(signal)//2))
  571. return f, psd
    def calculate_ava_vav(self):
        """Overall weighted acceleration across translational and rotational axes.

        Based on ISO 2631-1:1997: combines the weighted r.m.s. values of the
        three body-frame accelerations and the three angular rates.

        Returns:
            float: combined weighted acceleration value.
        """
        # Axis weighting coefficients.
        k_x = 1.0       # X acceleration weight
        k_y = 1.0       # Y acceleration weight
        k_z = 1.0       # Z acceleration weight
        k_roll = 0.63   # roll-rate weight
        k_pitch = 0.8   # pitch-rate weight
        k_yaw = 0.5     # yaw-rate weight
        df = self.ego_df.copy()
        # Required columns for the ENU -> body-frame conversion.
        if 'accelX' not in df.columns or 'accelY' not in df.columns:
            self.logger.warning("缺少计算多维度综合加权加速度所需的数据列")
            return self.calculated_value['ava_vav']
        # Body frame: X forward, Y to the left, Z up — requires the heading.
        if 'posH' not in df.columns:
            self.logger.warning("缺少航向角数据,无法进行坐标转换")
            return self.calculated_value['ava_vav']
        df['posH_rad'] = np.radians(df['posH'])
        # posH is the heading angle, 0° = north, clockwise positive:
        #   body X = east*sin(posH) + north*cos(posH)
        #   body Y = east*cos(posH) - north*sin(posH)
        df['a_x_body'] = df['accelX'] * np.sin(df['posH_rad']) + df['accelY'] * np.cos(df['posH_rad'])
        df['a_y_body'] = df['accelX'] * np.cos(df['posH_rad']) - df['accelY'] * np.sin(df['posH_rad'])
        # Vertical acceleration; assumed 0 when not recorded.
        df['a_z_body'] = df['accelZ'] if 'accelZ' in df.columns else pd.Series(np.zeros(len(df)))
        # Angular rates; zero when not recorded. speedH (heading rate)
        # stands in for the yaw rate.
        omega_roll = df['rollRate'] if 'rollRate' in df.columns else pd.Series(np.zeros(len(df)))
        omega_pitch = df['pitchRate'] if 'pitchRate' in df.columns else pd.Series(np.zeros(len(df)))
        omega_yaw = df['speedH']  # heading rate used as yaw rate
        # Estimate the sampling frequency, assuming uniform sampling.
        if len(df) > 1:
            time_diff = df['simTime'].diff().median()
            fs = 1.0 / time_diff if time_diff > 0 else 100  # default 100 Hz
        else:
            fs = 100  # default sampling frequency
        # ISO 2631-1 frequency weighting: Wd horizontal, Wk vertical.
        a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wd', fs)
        a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wd', fs)
        a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wk', fs)
        # The ISO standard does not directly specify angular-rate weightings;
        # the rates are used unfiltered here as a simplification.
        omega_roll_weighted = omega_roll
        omega_pitch_weighted = omega_pitch
        omega_yaw_weighted = omega_yaw
        # Weighted r.m.s. per axis: square, average, square root.
        a_x_rms = np.sqrt(np.mean(a_x_weighted**2))
        a_y_rms = np.sqrt(np.mean(a_y_weighted**2))
        a_z_rms = np.sqrt(np.mean(a_z_weighted**2))
        omega_roll_rms = np.sqrt(np.mean(omega_roll_weighted**2))
        omega_pitch_rms = np.sqrt(np.mean(omega_pitch_weighted**2))
        omega_yaw_rms = np.sqrt(np.mean(omega_yaw_weighted**2))
        # Combined weighted acceleration.
        ava_vav = np.sqrt(
            k_x * a_x_rms**2 +
            k_y * a_y_rms**2 +
            k_z * a_z_rms**2 +
            k_roll * omega_roll_rms**2 +
            k_pitch * omega_pitch_rms**2 +
            k_yaw * omega_yaw_rms**2
        )
        self.calculated_value['ava_vav'] = ava_vav
        self.logger.info(f"多维度综合加权加速度(ava_vav)计算结果: {ava_vav}")
        return ava_vav
    def calculate_msdv(self):
        """Compute the Motion Sickness Dose Value (MSDV).

        MSDV quantifies the motion-sickness risk from sustained vibration:
        the time-accumulated square of the frequency-weighted acceleration,
        reflecting the total vibration exposure over the trace.

        Formula: MSDV = √(∫[0,T] a_ω(t)² dt)

        Returns:
            float: motion sickness dose value.
        """
        df = self.ego_df.copy()
        # Required columns for the ENU -> body-frame conversion.
        if 'accelX' not in df.columns or 'accelY' not in df.columns:
            self.logger.warning("缺少计算晕动剂量值所需的数据列")
            return self.calculated_value['msdv']
        if 'posH' not in df.columns:
            self.logger.warning("缺少航向角数据,无法进行坐标转换")
            return self.calculated_value['msdv']
        # Body frame: X forward, Y to the left, Z up.
        df['posH_rad'] = np.radians(df['posH'])
        # posH is the heading angle, 0° = north, clockwise positive:
        #   body X = east*sin(posH) + north*cos(posH)
        #   body Y = east*cos(posH) - north*sin(posH)
        df['a_x_body'] = df['accelX'] * np.sin(df['posH_rad']) + df['accelY'] * np.cos(df['posH_rad'])
        df['a_y_body'] = df['accelX'] * np.cos(df['posH_rad']) - df['accelY'] * np.sin(df['posH_rad'])
        # Vertical acceleration; assumed 0 when not recorded.
        df['a_z_body'] = df['accelZ'] if 'accelZ' in df.columns else pd.Series(np.zeros(len(df)))
        # Integration steps and total duration of the trace.
        df['time_diff'] = df['simTime'].diff().fillna(0)
        total_time = df['time_diff'].sum()
        # Estimate the sampling frequency, assuming uniform sampling.
        if len(df) > 1:
            time_diff = df['simTime'].diff().median()
            fs = 1.0 / time_diff if time_diff > 0 else 100  # default 100 Hz
        else:
            fs = 100  # default sampling frequency
        # Motion-sickness evaluation uses the Wf weighting on all axes.
        a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wf', fs)
        a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wf', fs)
        a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wf', fs)
        # Time-weighted r.m.s. per axis.
        a_x_rms = np.sqrt(np.sum(a_x_weighted**2 * df['time_diff']) / total_time)
        a_y_rms = np.sqrt(np.sum(a_y_weighted**2 * df['time_diff']) / total_time)
        a_z_rms = np.sqrt(np.sum(a_z_weighted**2 * df['time_diff']) / total_time)
        # r.m.s. values logged for reference only.
        self.logger.info(f"X方向加权均方根值: {a_x_rms}")
        self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
        self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
        # Per-axis MSDV from the r.m.s. value and the total duration.
        msdv_x = a_x_rms * np.sqrt(total_time)
        msdv_y = a_y_rms * np.sqrt(total_time)
        msdv_z = a_z_rms * np.sqrt(total_time)
        # Combine axes; the vertical axis carries a higher weight (×1.4).
        msdv = np.sqrt(msdv_x**2 + msdv_y**2 + (1.4 * msdv_z)**2)
        self.calculated_value['msdv'] = msdv
        self.logger.info(f"晕动剂量值(MSDV)计算结果: {msdv}")
        self.logger.info(f"X方向MSDV: {msdv_x}, Y方向MSDV: {msdv_y}, Z方向MSDV: {msdv_z}")
        return msdv
  711. def _cal_cur_ego_path(self, row):
  712. """计算车辆轨迹曲率"""
  713. try:
  714. divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
  715. if not divide:
  716. res = None
  717. else:
  718. res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
  719. except:
  720. res = None
  721. return res
  722. def _peak_valley_determination(self, df):
  723. """确定角速度的峰谷"""
  724. peaks, _ = scipy.signal.find_peaks(
  725. df['speedH'], height=2.3, distance=3,
  726. prominence=2.3, width=1)
  727. valleys, _ = scipy.signal.find_peaks(
  728. -df['speedH'], height=2.3, distance=3,
  729. prominence=2.3, width=1)
  730. return sorted(list(peaks) + list(valleys))
  731. def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=4.6):
  732. """判断峰谷是否满足蛇行条件"""
  733. t_diff = p_curr[0] - p_last[0]
  734. v_diff = abs(p_curr[1] - p_last[1])
  735. s = p_curr[1] * p_last[1]
  736. if t_diff < tw and v_diff > avg and s < 0:
  737. if [p_last[0], p_curr[0]] not in self.zigzag_time_list:
  738. self.zigzag_time_list.append([p_last[0], p_curr[0]])
  739. return True
  740. return False
  741. def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
  742. """处理顿挫数据"""
  743. if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
  744. return np.nan
  745. elif abs(lon_acc) == 0:
  746. return 0
  747. elif lon_acc > 0 and lon_acc < ip_acc:
  748. return 1
  749. elif lon_acc < 0 and lon_acc > ip_dec:
  750. return -1
  751. else:
  752. return 0
  753. @peak_valley_decorator
  754. def _zigzag_count_func(self, p_curr, p_last, flag=True):
  755. """计算蛇行次数"""
  756. if flag:
  757. self.zigzag_count += 1
  758. else:
  759. self.zigzag_count += 0
  760. @peak_valley_decorator
  761. def _cal_zigzag_strength(self, p_curr, p_last, flag=True):
  762. """计算蛇行强度"""
  763. if flag:
  764. v_diff = abs(p_curr[1] - p_last[1])
  765. t_diff = p_curr[0] - p_last[0]
  766. if t_diff > 0:
  767. self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
  768. else:
  769. self.zigzag_stre_list = []
  770. def _get_zigzag_times(self):
  771. """获取所有蛇行时间点"""
  772. all_times = []
  773. for time_range in self.zigzag_time_list:
  774. start, end = time_range
  775. # 获取这个时间范围内的所有时间点
  776. times_in_range = self.ego_df[(self.ego_df['simTime'] >= start) &
  777. (self.ego_df['simTime'] <= end)]['simTime'].tolist()
  778. all_times.extend(times_in_range)
  779. return all_times
  780. def calculate_zigzag_count(self):
  781. """计算蛇行指标"""
  782. self._zigzag_count_func()
  783. return self.zigzag_count
  784. def calculate_shake_count(self):
  785. """计算晃动指标"""
  786. self._shake_detector()
  787. return self.shake_count
  788. def calculate_cadence_count(self):
  789. """计算顿挫指标"""
  790. self._cadence_detector()
  791. return self.cadence_count
  792. def calculate_slam_brake_count(self):
  793. """计算急刹车指标"""
  794. self._slam_brake_detector()
  795. return self.slam_brake_count
  796. def calculate_slam_accel_count(self):
  797. """计算急加速指标"""
  798. self._slam_accel_detector()
  799. return self.slam_accel_count
  800. def _shake_detector(self):
  801. """检测晃动事件"""
  802. # 获取数据
  803. df = self.ego_df.copy()
  804. # 检查是否有必要的列
  805. if 'lat_acc' not in df.columns:
  806. self.logger.warning("缺少计算晃动指标所需的数据列")
  807. return
  808. # 设置晃动检测阈值
  809. shake_threshold = 1.5 # 横向加速度阈值 m/s²
  810. min_duration = 0.5 # 最小持续时间 秒
  811. # 标记超过阈值的点
  812. df['shake_flag'] = (abs(df['lat_acc']) > shake_threshold).astype(int)
  813. # 检测连续的晃动事件
  814. shake_events = []
  815. in_event = False
  816. start_idx = 0
  817. for i, row in df.iterrows():
  818. if row['shake_flag'] == 1 and not in_event:
  819. # 开始新的晃动事件
  820. in_event = True
  821. start_idx = i
  822. elif row['shake_flag'] == 0 and in_event:
  823. # 结束当前晃动事件
  824. in_event = False
  825. end_idx = i - 1
  826. # 计算事件持续时间
  827. start_time = df.loc[start_idx, 'simTime']
  828. end_time = df.loc[end_idx, 'simTime']
  829. duration = end_time - start_time
  830. # 如果持续时间超过阈值,记录为有效晃动事件
  831. if duration >= min_duration:
  832. shake_events.append({
  833. 'start_time': start_time,
  834. 'end_time': end_time,
  835. 'start_frame': df.loc[start_idx, 'simFrame'],
  836. 'end_frame': df.loc[end_idx, 'simFrame'],
  837. 'duration': duration,
  838. 'max_lat_acc': df.loc[start_idx:end_idx, 'lat_acc'].abs().max()
  839. })
  840. # 添加到不舒适事件表
  841. self.discomfort_df = self.discomfort_df.append({
  842. 'start_time': start_time,
  843. 'end_time': end_time,
  844. 'start_frame': df.loc[start_idx, 'simFrame'],
  845. 'end_frame': df.loc[end_idx, 'simFrame'],
  846. 'type': 'shake'
  847. }, ignore_index=True)
  848. # 如果最后一个事件没有结束,检查它
  849. if in_event:
  850. end_idx = len(df) - 1
  851. start_time = df.loc[start_idx, 'simTime']
  852. end_time = df.loc[end_idx, 'simTime']
  853. duration = end_time - start_time
  854. if duration >= min_duration:
  855. shake_events.append({
  856. 'start_time': start_time,
  857. 'end_time': end_time,
  858. 'start_frame': df.loc[start_idx, 'simFrame'],
  859. 'end_frame': df.loc[end_idx, 'simFrame'],
  860. 'duration': duration,
  861. 'max_lat_acc': df.loc[start_idx:end_idx, 'lat_acc'].abs().max()
  862. })
  863. # 添加到不舒适事件表
  864. self.discomfort_df = self.discomfort_df.append({
  865. 'start_time': start_time,
  866. 'end_time': end_time,
  867. 'start_frame': df.loc[start_idx, 'simFrame'],
  868. 'end_frame': df.loc[end_idx, 'simFrame'],
  869. 'type': 'shake'
  870. }, ignore_index=True)
  871. # 更新晃动计数
  872. self.shake_count = len(shake_events)
  873. self.logger.info(f"检测到 {self.shake_count} 次晃动事件")
  874. def _cadence_detector(self):
  875. """检测顿挫事件"""
  876. # 获取数据
  877. df = self.ego_df.copy()
  878. # 检查是否有必要的列
  879. if 'cadence' not in df.columns:
  880. self.logger.warning("缺少计算顿挫指标所需的数据列")
  881. return
  882. # 设置顿挫检测参数
  883. min_duration = 0.3 # 最小持续时间 秒
  884. # 检测连续的顿挫事件
  885. cadence_events = []
  886. in_event = False
  887. start_idx = 0
  888. for i, row in df.iterrows():
  889. if not pd.isna(row['cadence']) and not in_event:
  890. # 开始新的顿挫事件
  891. in_event = True
  892. start_idx = i
  893. current_direction = np.sign(row['cadence'])
  894. elif (pd.isna(row['cadence']) or np.sign(row['cadence']) != current_direction) and in_event:
  895. # 结束当前顿挫事件
  896. in_event = False
  897. end_idx = i - 1
  898. # 计算事件持续时间
  899. start_time = df.loc[start_idx, 'simTime']
  900. end_time = df.loc[end_idx, 'simTime']
  901. duration = end_time - start_time
  902. # 如果持续时间超过阈值,记录为有效顿挫事件
  903. if duration >= min_duration:
  904. cadence_events.append({
  905. 'start_time': start_time,
  906. 'end_time': end_time,
  907. 'start_frame': df.loc[start_idx, 'simFrame'],
  908. 'end_frame': df.loc[end_idx, 'simFrame'],
  909. 'duration': duration,
  910. 'direction': 'acceleration' if current_direction > 0 else 'deceleration'
  911. })
  912. # 添加到不舒适事件表
  913. self.discomfort_df = self.discomfort_df.append({
  914. 'start_time': start_time,
  915. 'end_time': end_time,
  916. 'start_frame': df.loc[start_idx, 'simFrame'],
  917. 'end_frame': df.loc[end_idx, 'simFrame'],
  918. 'type': 'cadence'
  919. }, ignore_index=True)
  920. # 如果最后一个事件没有结束,检查它
  921. if in_event:
  922. end_idx = len(df) - 1
  923. start_time = df.loc[start_idx, 'simTime']
  924. end_time = df.loc[end_idx, 'simTime']
  925. duration = end_time - start_time
  926. if duration >= min_duration:
  927. cadence_events.append({
  928. 'start_time': start_time,
  929. 'end_time': end_time,
  930. 'start_frame': df.loc[start_idx, 'simFrame'],
  931. 'end_frame': df.loc[end_idx, 'simFrame'],
  932. 'duration': duration,
  933. 'direction': 'acceleration' if current_direction > 0 else 'deceleration'
  934. })
  935. # 添加到不舒适事件表
  936. self.discomfort_df = self.discomfort_df.append({
  937. 'start_time': start_time,
  938. 'end_time': end_time,
  939. 'start_frame': df.loc[start_idx, 'simFrame'],
  940. 'end_frame': df.loc[end_idx, 'simFrame'],
  941. 'type': 'cadence'
  942. }, ignore_index=True)
  943. # 更新顿挫计数
  944. self.cadence_count = len(cadence_events)
  945. self.logger.info(f"检测到 {self.cadence_count} 次顿挫事件")
  946. def _slam_brake_detector(self):
  947. """检测急刹车事件"""
  948. # 获取数据
  949. df = self.ego_df.copy()
  950. # 检查是否有必要的列
  951. if 'slam_brake' not in df.columns:
  952. self.logger.warning("缺少计算急刹车指标所需的数据列")
  953. return
  954. # 设置急刹车检测参数
  955. min_duration = 0.5 # 最小持续时间 秒
  956. # 检测连续的急刹车事件
  957. slam_brake_events = []
  958. in_event = False
  959. start_idx = 0
  960. for i, row in df.iterrows():
  961. if row['slam_brake'] == 1 and not in_event:
  962. # 开始新的急刹车事件
  963. in_event = True
  964. start_idx = i
  965. elif row['slam_brake'] == 0 and in_event:
  966. # 结束当前急刹车事件
  967. in_event = False
  968. end_idx = i - 1
  969. # 计算事件持续时间
  970. start_time = df.loc[start_idx, 'simTime']
  971. end_time = df.loc[end_idx, 'simTime']
  972. duration = end_time - start_time
  973. # 如果持续时间超过阈值,记录为有效急刹车事件
  974. if duration >= min_duration:
  975. slam_brake_events.append({
  976. 'start_time': start_time,
  977. 'end_time': end_time,
  978. 'start_frame': df.loc[start_idx, 'simFrame'],
  979. 'end_frame': df.loc[end_idx, 'simFrame'],
  980. 'duration': duration,
  981. 'min_lon_acc': df.loc[start_idx:end_idx, 'lon_acc'].min()
  982. })
  983. # 添加到不舒适事件表
  984. self.discomfort_df = self.discomfort_df.append({
  985. 'start_time': start_time,
  986. 'end_time': end_time,
  987. 'start_frame': df.loc[start_idx, 'simFrame'],
  988. 'end_frame': df.loc[end_idx, 'simFrame'],
  989. 'type': 'slam_brake'
  990. }, ignore_index=True)
  991. # 如果最后一个事件没有结束,检查它
  992. if in_event:
  993. end_idx = len(df) - 1
  994. start_time = df.loc[start_idx, 'simTime']
  995. end_time = df.loc[end_idx, 'simTime']
  996. duration = end_time - start_time
  997. if duration >= min_duration:
  998. slam_brake_events.append({
  999. 'start_time': start_time,
  1000. 'end_time': end_time,
  1001. 'start_frame': df.loc[start_idx, 'simFrame'],
  1002. 'end_frame': df.loc[end_idx, 'simFrame'],
  1003. 'duration': duration,
  1004. 'min_lon_acc': df.loc[start_idx:end_idx, 'lon_acc'].min()
  1005. })
  1006. # 添加到不舒适事件表
  1007. self.discomfort_df = self.discomfort_df.append({
  1008. 'start_time': start_time,
  1009. 'end_time': end_time,
  1010. 'start_frame': df.loc[start_idx, 'simFrame'],
  1011. 'end_frame': df.loc[end_idx, 'simFrame'],
  1012. 'type': 'slam_brake'
  1013. }, ignore_index=True)
  1014. # 更新急刹车计数
  1015. self.slam_brake_count = len(slam_brake_events)
  1016. self.logger.info(f"检测到 {self.slam_brake_count} 次急刹车事件")
  1017. def _slam_accel_detector(self):
  1018. """检测急加速事件"""
  1019. # 获取数据
  1020. df = self.ego_df.copy()
  1021. # 检查是否有必要的列
  1022. if 'slam_accel' not in df.columns:
  1023. self.logger.warning("缺少计算急加速指标所需的数据列")
  1024. return
  1025. # 设置急加速检测参数
  1026. min_duration = 0.5 # 最小持续时间 秒
  1027. # 检测连续的急加速事件
  1028. slam_accel_events = []
  1029. in_event = False
  1030. start_idx = 0
  1031. for i, row in df.iterrows():
  1032. if row['slam_accel'] == 1 and not in_event:
  1033. # 开始新的急加速事件
  1034. in_event = True
  1035. start_idx = i
  1036. elif row['slam_accel'] == 0 and in_event:
  1037. # 结束当前急加速事件
  1038. in_event = False
  1039. end_idx = i - 1
  1040. # 计算事件持续时间
  1041. start_time = df.loc[start_idx, 'simTime']
  1042. end_time = df.loc[end_idx, 'simTime']
  1043. duration = end_time - start_time
  1044. # 如果持续时间超过阈值,记录为有效急加速事件
  1045. if duration >= min_duration:
  1046. slam_accel_events.append({
  1047. 'start_time': start_time,
  1048. 'end_time': end_time,
  1049. 'start_frame': df.loc[start_idx, 'simFrame'],
  1050. 'end_frame': df.loc[end_idx, 'simFrame'],
  1051. 'duration': duration,
  1052. 'max_lon_acc': df.loc[start_idx:end_idx, 'lon_acc'].max()
  1053. })
  1054. # 添加到不舒适事件表
  1055. self.discomfort_df = self.discomfort_df.append({
  1056. 'start_time': start_time,
  1057. 'end_time': end_time,
  1058. 'start_frame': df.loc[start_idx, 'simFrame'],
  1059. 'end_frame': df.loc[end_idx, 'simFrame'],
  1060. 'type': 'slam_accel'
  1061. }, ignore_index=True)
  1062. # 如果最后一个事件没有结束,检查它
  1063. if in_event:
  1064. end_idx = len(df) - 1
  1065. start_time = df.loc[start_idx, 'simTime']
  1066. end_time = df.loc[end_idx, 'simTime']
  1067. duration = end_time - start_time
  1068. if duration >= min_duration:
  1069. slam_accel_events.append({
  1070. 'start_time': start_time,
  1071. 'end_time': end_time,
  1072. 'start_frame': df.loc[start_idx, 'simFrame'],
  1073. 'end_frame': df.loc[end_idx, 'simFrame'],
  1074. 'duration': duration,
  1075. 'max_lon_acc': df.loc[start_idx:end_idx, 'lon_acc'].max()
  1076. })
  1077. # 添加到不舒适事件表
  1078. self.discomfort_df = self.discomfort_df.append({
  1079. 'start_time': start_time,
  1080. 'end_time': end_time,
  1081. 'start_frame': df.loc[start_idx, 'simFrame'],
  1082. 'end_frame': df.loc[end_idx, 'simFrame'],
  1083. 'type': 'slam_accel'
  1084. }, ignore_index=True)
  1085. # 更新急加速计数
  1086. self.slam_accel_count = len(slam_accel_events)
  1087. self.logger.info(f"检测到 {self.slam_accel_count} 次急加速事件")