# comfort.py
  1. """
  2. 核心功能
  3. 坐标系处理:
  4. 支持东北天坐标系(ENU)到车身坐标系的转换
  5. 使用航向角(posH)进行坐标系旋转
  6. 车辆坐标系定义:x前,y左,z上
  7. 舒适性指标计算:
  8. 运动舒适度指数(motionComfortIndex)
  9. 乘坐质量评分(rideQualityScore)
  10. 晕车概率(motionSickness)
  11. 振动剂量值(VDV)
  12. 多维度综合加权加速度(ava_vav)
  13. 晕动剂量值(MSDV)
  14. 蛇行指标(zigzag)
  15. 晃动指标(shake)
  16. 顿挫指标(cadence)
  17. 急刹车指标(slamBrake)
  18. 急加速指标(slamAccelerate)
  19. 事件检测:
  20. 所有指标均支持事件检测
  21. 当指标超过阈值时记录事件
  22. 记录事件起止时间、帧号和类型
  23. 所有事件保存到discomfort_df数据框中
  24. 关键技术
  25. ISO 2631-1:1997标准:
  26. 实现Wk、Wd、Wf三种频率加权滤波器
  27. 用于计算VDV、MSDV等指标
  28. 车辆动力学分析:
  29. 基于加速度变化率(Jerk)评估舒适性
  30. 考虑三轴加速度和角速度的综合影响
  31. 事件检测算法:
  32. 峰值检测(蛇行、晃动)
  33. 聚类分析(顿挫)
  34. 连续事件检测(急刹车、急加速)
  35. 数据结构
  36. discomfort_df:
  37. 存储所有检测到的不舒适事件
  38. 包含字段:start_time, end_time, start_frame, end_frame, type
  39. calculated_value:
  40. 存储各指标的计算结果
  41. 便于后续报告生成
  42. 日志系统
  43. 详细日志记录:
  44. 记录指标计算过程
  45. 记录事件检测结果
  46. 记录异常情况
  47. 扩展性
  48. 模块化设计:
  49. 每个指标独立计算
  50. 方便添加新指标
  51. 配置驱动:
  52. 通过配置文件定义需要计算的指标
  53. 支持动态扩展
  54. """
# !/usr/bin/env python
# -*- coding: utf-8 -*-
##################################################################
#
# Copyright (c) 2023 CICV, Inc. All Rights Reserved
#
##################################################################
"""
@Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
@Data: 2023/06/25
@Last Modified: 2025/04/25
@Summary: Comfort metrics
"""
  68. import scipy.signal
  69. import pandas as pd
  70. import numpy as np
  71. import os
  72. from pathlib import Path
  73. from typing import Dict, List, Any, Optional, Callable, Union, Tuple
  74. from modules.lib.score import Score
  75. from modules.lib.common import get_interpolation, get_frame_with_time
  76. from modules.lib import data_process
  77. from modules.lib.log_manager import LogManager
  78. from modules.lib.chart_generator import generate_comfort_chart_data
  79. # 更新COMFORT_INFO列表,添加车辆坐标系下的速度和加速度字段
  80. COMFORT_INFO = [
  81. "simTime",
  82. "simFrame",
  83. "speedX",
  84. "speedY",
  85. "accelX",
  86. "accelY",
  87. "curvHor",
  88. "lightMask",
  89. "v",
  90. "lat_acc",
  91. "lon_acc",
  92. "time_diff",
  93. "lon_acc_diff",
  94. "lon_acc_roc",
  95. "speedH",
  96. "accelH",
  97. "posH",
  98. "lon_acc_vehicle", # 车辆坐标系下的纵向加速度
  99. "lat_acc_vehicle", # 车辆坐标系下的横向加速度
  100. "acc_z_vehicle", # 车辆坐标系下的垂向加速度
  101. "lon_v_vehicle", # 车辆坐标系下的纵向速度
  102. "lat_v_vehicle", # 车辆坐标系下的横向速度
  103. "vel_z_vehicle" # 车辆坐标系下的垂向速度
  104. ]
  105. # ----------------------
  106. # 独立指标计算函数
  107. # ----------------------
  108. # 更新指标计算函数,返回事件次数而非指标值
  109. def calculate_motioncomfortindex(data_processed) -> dict:
  110. """计算运动舒适度指数事件次数"""
  111. comfort = ComfortCalculator(data_processed)
  112. # 计算舒适度指数并检测事件
  113. comfort.calculate_motion_comfort_index()
  114. # 统计事件类型为'motionComfortIndex'的事件次数
  115. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'motionComfortIndex'])
  116. return {"motionComfortIndex": float(count)}
  117. def calculate_ridequalityscore(data_processed) -> dict:
  118. """计算乘坐质量评分事件次数"""
  119. comfort = ComfortCalculator(data_processed)
  120. # 计算乘坐质量评分并检测事件
  121. comfort.calculate_ride_quality_score()
  122. # 统计事件类型为'rideQualityScore'的事件次数
  123. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'rideQualityScore'])
  124. return {"rideQualityScore": float(count)}
  125. def calculate_motionsickness(data_processed) -> dict:
  126. """计算晕车概率事件次数"""
  127. comfort = ComfortCalculator(data_processed)
  128. # 计算晕车概率并检测事件
  129. comfort.calculate_motion_sickness_probability()
  130. # 统计事件类型为'motionSickness'的事件次数
  131. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'motionSickness'])
  132. return {"motionSickness": float(count)}
  133. def calculate_vdv(data_processed) -> dict:
  134. """计算振动剂量值(VDV)事件次数"""
  135. comfort = ComfortCalculator(data_processed)
  136. # 计算VDV并检测事件
  137. comfort.calculate_vdv()
  138. # 统计事件类型为'vdv'的事件次数
  139. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'vdv'])
  140. return {"vdv": float(count)}
  141. def calculate_ava_vav(data_processed) -> dict:
  142. """计算多维度综合加权加速度事件次数"""
  143. comfort = ComfortCalculator(data_processed)
  144. # 计算AVA/VAV并检测事件
  145. comfort.calculate_ava_vav()
  146. # 统计事件类型为'ava_vav'的事件次数
  147. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'ava_vav'])
  148. return {"ava_vav": float(count)}
  149. def calculate_msdv(data_processed) -> dict:
  150. """计算晕动剂量值(MSDV)事件次数"""
  151. comfort = ComfortCalculator(data_processed)
  152. # 计算MSDV并检测事件
  153. comfort.calculate_msdv()
  154. # 统计事件类型为'msdv'的事件次数
  155. count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'msdv'])
  156. return {"msdv": float(count)}
  157. def calculate_zigzag(data_processed) -> dict:
  158. """计算蛇行指标"""
  159. comfort = ComfortCalculator(data_processed)
  160. zigzag_count = comfort.calculate_zigzag_count()
  161. return {"zigzag": float(zigzag_count)}
  162. def calculate_shake(data_processed) -> dict:
  163. """计算晃动指标"""
  164. comfort = ComfortCalculator(data_processed)
  165. shake_count = comfort.calculate_shake_count()
  166. return {"shake": float(shake_count)}
  167. def calculate_cadence(data_processed) -> dict:
  168. """计算顿挫指标"""
  169. comfort = ComfortCalculator(data_processed)
  170. cadence_count = comfort.calculate_cadence_count()
  171. return {"cadence": float(cadence_count)}
  172. def calculate_slambrake(data_processed) -> dict:
  173. """计算急刹车指标"""
  174. comfort = ComfortCalculator(data_processed)
  175. slam_brake_count = comfort.calculate_slam_brake_count()
  176. return {"slamBrake": float(slam_brake_count)}
  177. def calculate_slamaccelerate(data_processed) -> dict:
  178. """计算急加速指标"""
  179. comfort = ComfortCalculator(data_processed)
  180. slam_accel_count = comfort.calculate_slam_accel_count()
  181. return {"slamAccelerate": float(slam_accel_count)}
  182. # 装饰器保持不变
  183. def peak_valley_decorator(method):
  184. def wrapper(self, *args, **kwargs):
  185. peak_valley = self._peak_valley_determination(self.df)
  186. pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
  187. if len(pv_list) != 0:
  188. flag = True
  189. p_last = pv_list[0]
  190. for i in range(1, len(pv_list)):
  191. p_curr = pv_list[i]
  192. if self._peak_valley_judgment(p_last, p_curr):
  193. # method(self, p_curr, p_last)
  194. method(self, p_curr, p_last, flag, *args, **kwargs)
  195. else:
  196. p_last = p_curr
  197. return method
  198. else:
  199. flag = False
  200. p_curr = [0, 0]
  201. p_last = [0, 0]
  202. method(self, p_curr, p_last, flag, *args, **kwargs)
  203. return method
  204. return wrapper
  205. class ComfortRegistry:
  206. """舒适性指标注册器"""
  207. def __init__(self, data_processed):
  208. self.logger = LogManager().get_logger() # 获取全局日志实例
  209. self.data = data_processed
  210. self.comfort_config = data_processed.comfort_config["comfort"]
  211. self.metrics = self._extract_metrics(self.comfort_config)
  212. self._registry = self._build_registry()
  213. self.output_dir = None # 图表数据输出目录
  214. def _extract_metrics(self, config_node: dict) -> list:
  215. """DFS遍历提取指标"""
  216. metrics = []
  217. def _recurse(node):
  218. if isinstance(node, dict):
  219. if 'name' in node and not any(isinstance(v, dict) for v in node.values()):
  220. metrics.append(node['name'])
  221. for v in node.values():
  222. _recurse(v)
  223. _recurse(config_node)
  224. self.logger.info(f'评比的舒适性指标列表:{metrics}')
  225. return metrics
  226. def _build_registry(self) -> dict:
  227. """自动注册指标函数"""
  228. registry = {}
  229. for metric_name in self.metrics:
  230. func_name = f"calculate_{metric_name.lower()}"
  231. try:
  232. registry[metric_name] = globals()[func_name]
  233. except KeyError:
  234. self.logger.error(f"未实现指标函数: {func_name}")
  235. return registry
  236. def batch_execute(self) -> dict:
  237. """批量执行指标计算"""
  238. results = {}
  239. for name, func in self._registry.items():
  240. try:
  241. result = func(self.data)
  242. results.update(result)
  243. # 新增:将每个指标的结果写入日志
  244. self.logger.info(f'舒适性指标[{name}]计算结果: {result}')
  245. except Exception as e:
  246. self.logger.error(f"{name} 执行失败: {str(e)}", exc_info=True)
  247. results[name] = None
  248. self.logger.info(f'舒适性指标计算结果:{results}')
  249. return results
  250. class ComfortCalculator:
  251. """舒适性指标计算类 - 提供核心计算功能"""
  252. def generate_metric_chart(self, metric_name: str) -> None:
  253. """
  254. 生成指标图表
  255. Args:
  256. metric_name: 指标名称
  257. """
  258. # 设置输出目录
  259. if not hasattr(self, 'output_dir') or not self.output_dir:
  260. self.output_dir = os.path.join(os.getcwd(), 'data')
  261. os.makedirs(self.output_dir, exist_ok=True)
  262. # 调用chart_generator中的函数生成图表
  263. chart_path = generate_comfort_chart_data(self, metric_name, self.output_dir)
  264. if chart_path:
  265. self.logger.info(f"{metric_name}图表已生成: {chart_path}")
  266. def __init__(self, data_processed):
  267. self.data_processed = data_processed
  268. self.logger = LogManager().get_logger()
  269. self.data = data_processed.ego_data
  270. self.ego_df = pd.DataFrame()
  271. self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
  272. # 统计指标
  273. self.calculated_value = {
  274. 'zigzag': 0,
  275. 'shake': 0,
  276. 'cadence': 0,
  277. 'slamBrake': 0,
  278. 'slamAccelerate': 0,
  279. 'ava_vav': 0, # 添加新指标的默认值
  280. 'msdv': 0, # 添加MSDV指标的默认值
  281. 'motionSickness': 0, # 添加晕车概率指标的默认值
  282. 'vdv:': 0,
  283. 'motionComfortIndex': 0, # 新增指标
  284. 'rideQualityScore': 0 # 新增指标
  285. }
  286. self.time_list = self.data['simTime'].values.tolist()
  287. self.frame_list = self.data['simFrame'].values.tolist()
  288. self.zigzag_count = 0
  289. self.shake_count = 0
  290. self.cadence_count = 0
  291. self.slam_brake_count = 0
  292. self.slam_accel_count = 0
  293. self.zigzag_time_list = []
  294. self.zigzag_stre_list = []
  295. self.shake_events = [] # 用于存储晃动事件数据
  296. self._initialize_data()
  297. def _initialize_data(self):
  298. """初始化数据"""
  299. self.ego_df = self.data[COMFORT_INFO].copy()
  300. self.df = self.ego_df.reset_index(drop=True)
  301. self._prepare_comfort_parameters()
  302. def _prepare_comfort_parameters(self):
  303. """准备舒适性计算所需参数"""
  304. # 计算加减速阈值 - 使用车辆坐标系下的纵向速度代替合速度
  305. speed_field = 'lon_v_vehicle' if 'lon_v_vehicle' in self.ego_df.columns else 'v'
  306. self.logger.info(f"加减速阈值计算使用的速度字段: {speed_field}")
  307. self.ego_df['ip_acc'] = self.ego_df[speed_field].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
  308. self.ego_df['ip_dec'] = self.ego_df[speed_field].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
  309. # 使用车辆坐标系下的纵向加速度计算急刹车和急加速
  310. acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in self.ego_df.columns else 'lon_acc'
  311. self.logger.info(f"急刹车和急加速检测使用的加速度字段: {acc_field}")
  312. # 使用车辆坐标系下的纵向加速度与阈值比较,判断急刹车和急加速
  313. self.ego_df['slam_brake'] = (self.ego_df[acc_field] - self.ego_df['ip_dec']).apply(
  314. lambda x: 1 if x < 0 else 0)
  315. self.ego_df['slam_accel'] = (self.ego_df[acc_field] - self.ego_df['ip_acc']).apply(
  316. lambda x: 1 if x > 0 else 0)
  317. # 确保cadence列使用车辆坐标系下的纵向加速度计算
  318. self.ego_df['cadence'] = self.ego_df.apply(
  319. lambda row: self._cadence_process_new(row[acc_field], row['ip_acc'], row['ip_dec']), axis=1)
  320. def _apply_frequency_weighting(self, acceleration_data, weighting_type='Wk', fs=100):
  321. """应用ISO 2631-1:1997标准的频率加权滤波
  322. 参数:
  323. acceleration_data: 加速度时间序列数据
  324. weighting_type: 加权类型,可选值包括:
  325. - 'Wk': 垂直方向(Z轴)加权
  326. - 'Wd': 水平方向(X和Y轴)加权
  327. - 'Wf': 运动病相关加权
  328. fs: 采样频率(Hz)
  329. 返回:
  330. 加权后的加速度数据
  331. """
  332. # 检查数据有效性
  333. if acceleration_data.empty or acceleration_data.isna().all():
  334. return acceleration_data
  335. # 根据ISO 2631-1:1997标准设计滤波器
  336. # 这些参数来自标准文档,用于构建数字滤波器
  337. if weighting_type == 'Wk': # 垂直方向(Z轴)
  338. # Wk滤波器参数
  339. f1 = 0.4
  340. f2 = 100.0
  341. f3 = 12.5
  342. f4 = 12.5
  343. Q1 = 0.63
  344. Q2 = 0.5
  345. Q3 = 0.63
  346. Q4 = 0.63
  347. K = 0.4
  348. elif weighting_type == 'Wd': # 水平方向(X和Y轴)
  349. # Wd滤波器参数
  350. f1 = 0.4
  351. f2 = 100.0
  352. f3 = 2.0
  353. f4 = 2.0
  354. Q1 = 0.63
  355. Q2 = 0.5
  356. Q3 = 0.63
  357. Q4 = 0.63
  358. K = 0.4
  359. elif weighting_type == 'Wf': # 运动病相关
  360. # Wf滤波器参数
  361. f1 = 0.08
  362. f2 = 0.63
  363. f3 = 0.25
  364. f4 = 0.8
  365. Q1 = 0.63
  366. Q2 = 0.86
  367. Q3 = 0.8
  368. Q4 = 0.8
  369. K = 1.0
  370. else:
  371. self.logger.warning(f"未知的加权类型: {weighting_type},使用原始数据")
  372. return acceleration_data
  373. # 将频率转换为角频率
  374. w1 = 2 * np.pi * f1
  375. w2 = 2 * np.pi * f2
  376. w3 = 2 * np.pi * f3
  377. w4 = 2 * np.pi * f4
  378. # 设计高通滤波器(s域)
  379. b1 = [K * w1 ** 2, 0]
  380. a1 = [1, w1 / Q1, w1 ** 2]
  381. # 设计低通滤波器(s域)
  382. b2 = [K, 0, 0]
  383. a2 = [1, w2 / Q2, w2 ** 2]
  384. # 设计加速度-速度转换滤波器(s域)
  385. b3 = [K, 0]
  386. a3 = [1, w3 / Q3, w3 ** 2]
  387. # 设计上升滤波器(s域)
  388. b4 = [K, 0, 0]
  389. a4 = [1, w4 / Q4, w4 ** 2]
  390. # 使用双线性变换将s域滤波器转换为z域
  391. b1_z, a1_z = scipy.signal.bilinear(b1, a1, fs)
  392. b2_z, a2_z = scipy.signal.bilinear(b2, a2, fs)
  393. b3_z, a3_z = scipy.signal.bilinear(b3, a3, fs)
  394. b4_z, a4_z = scipy.signal.bilinear(b4, a4, fs)
  395. # 应用滤波器链
  396. data_np = acceleration_data.to_numpy()
  397. filtered_data = scipy.signal.lfilter(b1_z, a1_z, data_np)
  398. filtered_data = scipy.signal.lfilter(b2_z, a2_z, filtered_data)
  399. filtered_data = scipy.signal.lfilter(b3_z, a3_z, filtered_data)
  400. filtered_data = scipy.signal.lfilter(b4_z, a4_z, filtered_data)
  401. return pd.Series(filtered_data, index=acceleration_data.index)
  402. def calculate_motion_comfort_index(self):
  403. """
  404. 计算运动舒适度指数(Motion Comfort Index)并检测低舒适度事件
  405. 使用车辆坐标系中的加速度与角速度数据,评估车辆运行过程中的乘坐舒适性。
  406. 返回范围:0~10,10为最舒适。
  407. """
  408. # 实际计算运动舒适度指数
  409. comfort_index = self._calculate_motion_comfort_index()
  410. # 直接设置阈值
  411. self._detect_threshold_events(
  412. comfort_index,
  413. 'motionComfortIndex',
  414. min_threshold=8.0, # 硬编码阈值
  415. max_threshold=10.0
  416. )
  417. return comfort_index
  418. def _calculate_motion_comfort_index(self):
  419. """实际计算运动舒适度指数"""
  420. df = self.ego_df.copy()
  421. # 检查必要字段
  422. required_cols = ['lon_acc_vehicle', 'lat_acc_vehicle']
  423. if not all(col in df.columns for col in required_cols):
  424. self.logger.warning("缺少车辆坐标系下的加速度列,无法计算运动舒适度")
  425. return 8.0
  426. # 合成加速度模长
  427. df['accel_magnitude'] = np.sqrt(df['lon_acc_vehicle'] ** 2 + df['lat_acc_vehicle'] ** 2)
  428. if 'acc_z_vehicle' in df.columns:
  429. df['accel_magnitude'] = np.sqrt(df['accel_magnitude'] ** 2 + df['acc_z_vehicle'] ** 2)
  430. # 时间差
  431. df['time_diff'] = df['simTime'].diff().fillna(0.01)
  432. # 加加速度(Jerk)
  433. df['jerk_lon'] = df['lon_acc_vehicle'].diff() / df['time_diff']
  434. df['jerk_lat'] = df['lat_acc_vehicle'].diff() / df['time_diff']
  435. df['jerk_magnitude'] = np.sqrt(df['jerk_lon'] ** 2 + df['jerk_lat'] ** 2)
  436. if 'acc_z_vehicle' in df.columns:
  437. df['jerk_z'] = df['acc_z_vehicle'].diff() / df['time_diff']
  438. df['jerk_magnitude'] = np.sqrt(df['jerk_magnitude'] ** 2 + df['jerk_z'] ** 2)
  439. # 角速度模长(以 rollRate, pitchRate, speedH 合成)
  440. omega_roll = df['rollRate'] if 'rollRate' in df.columns else pd.Series(np.zeros(len(df)))
  441. omega_pitch = df['pitchRate'] if 'pitchRate' in df.columns else pd.Series(np.zeros(len(df)))
  442. omega_yaw = df['speedH']
  443. df['angular_velocity'] = np.sqrt(omega_roll ** 2 + omega_pitch ** 2 + omega_yaw ** 2)
  444. # RMS 计算
  445. accel_rms = np.sqrt(np.mean(df['accel_magnitude'] ** 2))
  446. jerk_rms = np.sqrt(np.mean(df['jerk_magnitude'] ** 2))
  447. angular_rms = np.sqrt(np.mean(df['angular_velocity'] ** 2))
  448. # 阈值与权重
  449. accel_threshold = 2.0 # m/s²
  450. jerk_threshold = 1.0 # m/s³
  451. angular_threshold = 0.2 # rad/s
  452. accel_weight = 0.5
  453. jerk_weight = 0.3
  454. angular_weight = 0.2
  455. # 分数计算(0-10)
  456. accel_score = 10 * np.exp(-max(0, accel_rms - accel_threshold) / accel_threshold)
  457. jerk_score = 10 * np.exp(-max(0, jerk_rms - jerk_threshold) / jerk_threshold)
  458. angular_score = 10 * np.exp(-max(0, angular_rms - angular_threshold) / angular_threshold)
  459. comfort_index = (accel_weight * accel_score +
  460. jerk_weight * jerk_score +
  461. angular_weight * angular_score)
  462. comfort_index = np.clip(comfort_index, 0, 10)
  463. self.calculated_value['motionComfortIndex'] = comfort_index
  464. self.logger.info(f"运动舒适度指数(Motion Comfort Index): {comfort_index:.2f}/10")
  465. self.logger.info(
  466. f"加速度RMS: {accel_rms:.4f} m/s², 加加速度RMS: {jerk_rms:.4f} m/s³, 角速度RMS: {angular_rms:.4f} rad/s")
  467. return comfort_index
  468. def calculate_ride_quality_score(self):
  469. """
  470. 计算乘坐质量评分(Ride Quality Score)并检测低质量事件
  471. 基于 ISO 2631 标准,主要参考垂直振动(Z轴),用于评估车辆在颠簸路段的舒适性。
  472. 返回范围:0~100。
  473. 同时检测评分低于阈值(60)的事件
  474. """
  475. # 实际计算乘坐质量评分
  476. ride_quality_score = self._calculate_ride_quality_score()
  477. # 直接设置阈值
  478. self._detect_threshold_events(
  479. ride_quality_score,
  480. 'rideQualityScore',
  481. min_threshold=60.0, # 硬编码阈值
  482. max_threshold=100.0
  483. )
  484. return ride_quality_score
  485. def _calculate_ride_quality_score(self):
  486. """实际计算乘坐质量评分"""
  487. df = self.ego_df.copy()
  488. if 'acc_z_vehicle' not in df.columns:
  489. self.logger.warning("缺少垂直方向加速度(acc_z_vehicle),无法计算乘坐质量评分")
  490. return 70.0
  491. a_z_body = df['acc_z_vehicle']
  492. # 采样频率估计
  493. if len(df) > 1:
  494. time_diff = df['simTime'].diff().median()
  495. fs = 1.0 / time_diff if time_diff > 0 else 100
  496. else:
  497. fs = 100
  498. # ISO 2631 加权滤波
  499. a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wk', fs)
  500. # 计算 RMS
  501. a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
  502. # ISO 等级评分
  503. if a_z_rms < 0.315:
  504. base_score = 90
  505. elif a_z_rms < 0.63:
  506. base_score = 80
  507. elif a_z_rms < 1.0:
  508. base_score = 70
  509. elif a_z_rms < 1.6:
  510. base_score = 60
  511. elif a_z_rms < 2.5:
  512. base_score = 40
  513. else:
  514. base_score = 20
  515. # 评分调节因子
  516. duration_factor = min(1.0, 10.0 / (df['simTime'].max() - df['simTime'].min()))
  517. if len(a_z_weighted) > 50:
  518. f, psd = self._calculate_psd(a_z_weighted, fs)
  519. sensitive_mask = (f >= 4) & (f <= 8)
  520. sensitive_energy = np.sum(psd[sensitive_mask])
  521. total_energy = np.sum(psd)
  522. frequency_factor = 1.0 - 0.3 * (sensitive_energy / total_energy if total_energy > 0 else 0)
  523. else:
  524. frequency_factor = 1.0
  525. ride_quality_score = base_score * duration_factor * frequency_factor
  526. ride_quality_score = np.clip(ride_quality_score, 0, 100)
  527. self.calculated_value['rideQualityScore'] = ride_quality_score
  528. self.logger.info(f"乘坐质量评分(Ride Quality Score): {ride_quality_score:.2f}/100")
  529. self.logger.info(f"垂直加速度RMS: {a_z_rms:.4f} m/s²")
  530. # self.generate_metric_chart('rideQualityScore')
  531. return ride_quality_score
  532. def calculate_motion_sickness_probability(self):
  533. """计算晕车概率指标并检测高概率事件"""
  534. # 实际计算晕车概率
  535. motion_sickness_prob = self._calculate_motion_sickness_probability()
  536. # 直接设置阈值
  537. self._detect_threshold_events(
  538. motion_sickness_prob,
  539. 'motionSickness',
  540. min_threshold=0.0,
  541. max_threshold=30.0 # 硬编码阈值
  542. )
  543. # self.generate_metric_chart('motionsickness')
  544. return motion_sickness_prob
  545. def _calculate_motion_sickness_probability(self):
  546. """实际计算晕车概率指标"""
  547. # 获取数据
  548. df = self.ego_df.copy()
  549. # 车身坐标系:X轴指向车头,Y轴指向车辆左侧,Z轴指向车顶
  550. df['posH_rad'] = np.radians(df['posH'])
  551. # 转换加速度到车身坐标系
  552. df['a_x_body'] = df['lon_acc_vehicle']
  553. df['a_y_body'] = df['lat_acc_vehicle']
  554. # Z方向加速度,如果没有则假设为0
  555. df['a_z_body'] = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
  556. # 计算时间差
  557. df['time_diff'] = df['simTime'].diff().fillna(0)
  558. # 估计采样频率
  559. if len(df) > 1:
  560. time_diff = df['simTime'].diff().median()
  561. fs = 1.0 / time_diff if time_diff > 0 else 100
  562. else:
  563. fs = 100
  564. # 对各方向加速度应用适当的频率加权
  565. a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wf', fs)
  566. a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wf', fs)
  567. a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wf', fs)
  568. # 计算加加速度(Jerk)
  569. df['jerk_x'] = a_x_weighted.diff() / df['time_diff']
  570. df['jerk_y'] = a_y_weighted.diff() / df['time_diff']
  571. df['jerk_z'] = a_z_weighted.diff() / df['time_diff']
  572. # 填充NaN值
  573. df[['jerk_x', 'jerk_y', 'jerk_z']] = df[['jerk_x', 'jerk_y', 'jerk_z']].fillna(0)
  574. # 计算Jerk的均方根值(RMS)
  575. jerk_squared_sum = df['jerk_x'] ** 2 + df['jerk_y'] ** 2 + df['jerk_z'] ** 2
  576. jerk_rms = np.sqrt(np.mean(jerk_squared_sum))
  577. # 计算加速度平方和的均值
  578. accel_squared_sum = a_x_weighted ** 2 + a_y_weighted ** 2 + a_z_weighted ** 2
  579. accel_squared_mean = np.mean(accel_squared_sum)
  580. # 设置模型参数
  581. alpha = 0.1 # 加速度权重(s⁴/m²)
  582. beta = 0.5 # Jerk权重(s²/m²)
  583. gamma = 10.0 # 归一化因子(m²/s⁴)
  584. # 计算晕车概率
  585. acceleration_term = alpha * accel_squared_mean
  586. jerk_term = beta * jerk_rms
  587. score = (acceleration_term + jerk_term) / gamma
  588. probability = 100 * (1 - np.exp(-score))
  589. # 限制在0-100%范围内
  590. probability = np.clip(probability, 0, 100)
  591. # 记录计算结果
  592. self.calculated_value['motionSickness'] = probability
  593. self.logger.info(f"晕车概率(Motion Sickness Probability)计算结果: {probability:.2f}%")
  594. self.logger.info(f"加速度平方和均值: {accel_squared_mean:.4f} m²/s⁴, Jerk均方根值: {jerk_rms:.4f} m/s³")
  595. return probability
  596. def calculate_vdv(self):
  597. """计算振动剂量值(Vibration Dose Value, VDV)指标并检测高VDV事件"""
  598. # 实际计算VDV
  599. vdv_value = self._calculate_vdv()
  600. # 直接设置阈值
  601. self._detect_threshold_events(
  602. vdv_value,
  603. 'vdv',
  604. min_threshold=0.0,
  605. max_threshold=8.0 # 硬编码阈值
  606. )
  607. # self.generate_metric_chart('vdv')
  608. return vdv_value
  609. def _calculate_vdv(self):
  610. """实际计算振动剂量值"""
  611. # 获取数据
  612. df = self.ego_df.copy()
  613. # 使用车身坐标系下的加速度数据
  614. if 'lon_acc_vehicle' in df.columns and 'lat_acc_vehicle' and 'acc_z_vehicle' in df.columns:
  615. # 使用已转换的数据
  616. a_x_body = df['lon_acc_vehicle']
  617. a_y_body = df['lat_acc_vehicle']
  618. a_z_body = df['acc_z_vehicle']
  619. else:
  620. self.logger.warning("缺少lon_acc_vehicle等数据,无法进行坐标转换")
  621. return self.calculated_value['vdv']
  622. # 计算时间差
  623. df['time_diff'] = df['simTime'].diff().fillna(0)
  624. # 估计采样频率
  625. if len(df) > 1:
  626. time_diff = df['simTime'].diff().median()
  627. fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
  628. else:
  629. fs = 100 # 默认采样频率
  630. # 对各方向加速度应用适当的频率加权
  631. a_x_weighted = self._apply_frequency_weighting(a_x_body, 'Wd', fs) # 水平方向使用Wd
  632. a_y_weighted = self._apply_frequency_weighting(a_y_body, 'Wd', fs) # 水平方向使用Wd
  633. a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wk', fs) # 垂直方向使用Wk
  634. # 计算加权均方根值 (r.m.s.)
  635. a_x_rms = np.sqrt(np.mean(a_x_weighted ** 2))
  636. a_y_rms = np.sqrt(np.mean(a_y_weighted ** 2))
  637. a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
  638. # 记录r.m.s.值用于参考
  639. self.logger.info(f"X方向加权均方根值: {a_x_rms}")
  640. self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
  641. self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
  642. # 计算VDV - 对加速度四次方进行时间积分,再开四次方根
  643. # 对于X方向(前后方向)
  644. vdv_x = np.power(np.sum(np.power(np.abs(a_x_weighted), 4) * df['time_diff']), 0.25)
  645. # 对于Y方向(左右方向)
  646. vdv_y = np.power(np.sum(np.power(np.abs(a_y_weighted), 4) * df['time_diff']), 0.25)
  647. # 对于Z方向(上下方向)
  648. vdv_z = np.power(np.sum(np.power(np.abs(a_z_weighted), 4) * df['time_diff']), 0.25)
  649. # 综合VDV - 可以使用向量和或加权和
  650. # 根据ISO 2631标准,垂直方向(Z)的权重通常更高
  651. vdv = np.sqrt(vdv_x ** 2 + vdv_y ** 2 + (1.4 * vdv_z) ** 2)
  652. # 记录计算结果
  653. self.calculated_value['vdv'] = vdv
  654. self.logger.info(f"振动剂量值(VDV)计算结果: {vdv}")
  655. self.logger.info(f"X方向VDV: {vdv_x}, Y方向VDV: {vdv_y}, Z方向VDV: {vdv_z}")
  656. # 生成VDV指标图表
  657. # self.generate_metric_chart('vdv')
  658. return vdv
  659. def calculate_ava_vav(self):
  660. """计算多维度综合加权加速度并检测高值事件"""
  661. # 实际计算AVA/VAV
  662. ava_vav_value = self._calculate_ava_vav()
  663. # 检测高值事件
  664. # 直接设置阈值
  665. self._detect_threshold_events(
  666. ava_vav_value,
  667. 'ava_vav',
  668. min_threshold=0.0,
  669. max_threshold=0.63 # 硬编码阈值
  670. )
  671. # self.generate_metric_chart('ava_vav')
  672. return ava_vav_value
  673. def _calculate_ava_vav(self):
  674. """实际计算多维度综合加权加速度"""
  675. # 定义各方向的权重系数
  676. k_x = 1.0 # X方向加速度权重
  677. k_y = 1.0 # Y方向加速度权重
  678. k_z = 1.0 # Z方向加速度权重
  679. k_roll = 0.63 # 横滚角速度权重
  680. k_pitch = 0.8 # 俯仰角速度权重
  681. k_yaw = 0.5 # 偏航角速度权重
  682. # 获取数据
  683. df = self.ego_df.copy()
  684. # 计算时间差
  685. df['time_diff'] = df['simTime'].diff().fillna(0)
  686. df['a_x_body'] = df['lon_acc_vehicle']
  687. df['a_y_body'] = df['lat_acc_vehicle']
  688. # Z方向加速度,如果没有则假设为0
  689. df['a_z_body'] = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
  690. # 角速度数据,如果没有则使用角速度变化率代替
  691. # 注意:speedH是航向角速度,需要转换为车身坐标系下的偏航角速度
  692. omega_roll = df['rollRate'] if 'rollRate' in df.columns else pd.Series(np.zeros(len(df)))
  693. omega_pitch = df['pitchRate'] if 'pitchRate' in df.columns else pd.Series(np.zeros(len(df)))
  694. omega_yaw = df['speedH'] # 使用航向角速度作为偏航角速度
  695. # 应用ISO 2631-1:1997标准的频率加权滤波
  696. # 估计采样频率 - 假设数据是均匀采样的
  697. if len(df) > 1:
  698. time_diff = df['simTime'].diff().median()
  699. fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
  700. else:
  701. fs = 100 # 默认采样频率
  702. # 对各方向加速度应用适当的频率加权
  703. a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wd', fs)
  704. a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wd', fs)
  705. a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wk', fs)
  706. # 对角速度也应用适当的频率加权
  707. # 注意:ISO标准没有直接指定角速度的加权,这里使用简化处理
  708. omega_roll_weighted = omega_roll # 可以根据需要应用适当的滤波
  709. omega_pitch_weighted = omega_pitch
  710. omega_yaw_weighted = omega_yaw
  711. # 计算加权均方根值 (r.m.s.)
  712. # 对每个方向的加速度/角速度平方后求平均,再开平方根
  713. a_x_rms = np.sqrt(np.mean(a_x_weighted ** 2))
  714. a_y_rms = np.sqrt(np.mean(a_y_weighted ** 2))
  715. a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
  716. omega_roll_rms = np.sqrt(np.mean(omega_roll_weighted ** 2))
  717. omega_pitch_rms = np.sqrt(np.mean(omega_pitch_weighted ** 2))
  718. omega_yaw_rms = np.sqrt(np.mean(omega_yaw_weighted ** 2))
  719. # 计算综合加权加速度
  720. ava_vav = np.sqrt(
  721. k_x * a_x_rms ** 2 +
  722. k_y * a_y_rms ** 2 +
  723. k_z * a_z_rms ** 2 +
  724. k_roll * omega_roll_rms ** 2 +
  725. k_pitch * omega_pitch_rms ** 2 +
  726. k_yaw * omega_yaw_rms ** 2
  727. )
  728. # 记录计算结果
  729. self.calculated_value['ava_vav'] = ava_vav
  730. self.logger.info(f"多维度综合加权加速度(ava_vav)计算结果: {ava_vav}")
  731. return ava_vav
  732. def calculate_msdv(self):
  733. """计算晕动剂量值(Motion Sickness Dose Value, MSDV)指标并检测高值事件"""
  734. # 实际计算MSDV
  735. msdv_value = self._calculate_msdv()
  736. # 检测高值事件
  737. # 直接设置阈值
  738. self._detect_threshold_events(
  739. msdv_value,
  740. 'msdv',
  741. min_threshold=0.0,
  742. max_threshold=6.0 # 硬编码阈值
  743. )
  744. # self.generate_metric_chart('msdv')
  745. return msdv_value
    def _calculate_msdv(self):
        """Compute the Motion Sickness Dose Value (MSDV).

        Each body-frame acceleration channel is Wf-weighted (the ISO 2631-1
        motion-sickness filter), reduced to a time-weighted r.m.s., and scaled
        by sqrt(total duration): MSDV_axis = a_rms * sqrt(T).  The axes are
        combined with an extra 1.4 factor on the vertical channel.

        Returns:
            float: combined MSDV, also stored in
            ``self.calculated_value['msdv']``; when the required input columns
            are missing, the previously stored value is returned instead.
        """
        df = self.ego_df.copy()
        # Use body-frame acceleration channels when available.
        if 'lon_acc_vehicle' in df.columns and 'lat_acc_vehicle' in df.columns:
            a_x_body = df['lon_acc_vehicle']
            a_y_body = df['lat_acc_vehicle']
            # Vertical acceleration defaults to zero when not recorded.
            a_z_body = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
        else:
            self.logger.warning("缺少lon_acc_vehicle和lat_acc_vehicle数据,无法进行坐标转换")
            # NOTE(review): assumes 'msdv' was initialised earlier in
            # calculated_value; otherwise this raises KeyError — confirm.
            return self.calculated_value['msdv']
        # Per-sample time step and total recording duration.
        df['time_diff'] = df['simTime'].diff().fillna(0)
        total_time = df['time_diff'].sum()
        # Estimate the sampling frequency from the median step (default 100 Hz).
        if len(df) > 1:
            time_diff = df['simTime'].diff().median()
            fs = 1.0 / time_diff if time_diff > 0 else 100  # fallback 100 Hz
        else:
            fs = 100  # single sample: assume 100 Hz
        # Wf weighting targets the low-frequency band relevant to motion sickness.
        a_x_weighted = self._apply_frequency_weighting(a_x_body, 'Wf', fs)
        a_y_weighted = self._apply_frequency_weighting(a_y_body, 'Wf', fs)
        a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wf', fs)
        # Time-weighted r.m.s. per axis.
        a_x_rms = np.sqrt(np.sum(a_x_weighted ** 2 * df['time_diff']) / total_time)
        a_y_rms = np.sqrt(np.sum(a_y_weighted ** 2 * df['time_diff']) / total_time)
        a_z_rms = np.sqrt(np.sum(a_z_weighted ** 2 * df['time_diff']) / total_time)
        # Log the per-axis r.m.s. values for reference.
        self.logger.info(f"X方向加权均方根值: {a_x_rms}")
        self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
        self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
        # MSDV per axis: r.m.s. scaled by the square root of the total time.
        msdv_x = a_x_rms * np.sqrt(total_time)
        msdv_y = a_y_rms * np.sqrt(total_time)
        msdv_z = a_z_rms * np.sqrt(total_time)
        # Combine axes; the vertical channel carries an extra 1.4 factor.
        msdv = np.sqrt(msdv_x ** 2 + msdv_y ** 2 + (1.4 * msdv_z) ** 2)
        self.calculated_value['msdv'] = msdv
        self.logger.info(f"晕动剂量值(MSDV)计算结果: {msdv}")
        self.logger.info(f"X方向MSDV: {msdv_x}, Y方向MSDV: {msdv_y}, Z方向MSDV: {msdv_z}")
        # self.generate_metric_chart('msdv')
        return msdv
  795. def calculate_zigzag_count(self):
  796. """计算蛇行指标并检测事件"""
  797. # 原有的计算逻辑
  798. self._zigzag_detector()
  799. # 检测蛇行事件
  800. zigzag_events = self._detect_zigzag_events()
  801. self.generate_metric_chart('zigzag')
  802. # 返回事件次数
  803. return len(zigzag_events)
  804. def _detect_zigzag_events(self):
  805. """检测蛇行事件"""
  806. # 获取蛇行时间列表
  807. if not self.zigzag_time_list:
  808. return []
  809. # 创建事件列表
  810. events = []
  811. for time_range in self.zigzag_time_list:
  812. start_time, end_time = time_range
  813. start_frame = get_frame_with_time(self.time_list, self.frame_list, start_time)
  814. end_frame = get_frame_with_time(self.time_list, self.frame_list, end_time)
  815. events.append({
  816. 'start_time': start_time,
  817. 'end_time': end_time,
  818. 'start_frame': start_frame,
  819. 'end_frame': end_frame,
  820. 'type': 'zigzag'
  821. })
  822. # 添加到不舒适事件表
  823. new_row = pd.DataFrame([{
  824. 'start_time': start_time,
  825. 'end_time': end_time,
  826. 'start_frame': start_frame,
  827. 'end_frame': end_frame,
  828. 'type': 'zigzag'
  829. }])
  830. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  831. return events
  832. def calculate_shake_count(self):
  833. """计算晃动指标并检测事件"""
  834. # 原有的计算逻辑
  835. self._shake_detector()
  836. # 检测晃动事件
  837. shake_events = self._detect_shake_events()
  838. self.generate_metric_chart('shake')
  839. # 返回事件次数
  840. return len(shake_events)
  841. def _detect_shake_events(self):
  842. """检测晃动事件"""
  843. # 获取晃动事件数据
  844. if not self.shake_events:
  845. return []
  846. # 创建事件列表
  847. events = []
  848. for event in self.shake_events:
  849. events.append({
  850. 'start_time': event['start_time'],
  851. 'end_time': event['end_time'],
  852. 'start_frame': event['start_frame'],
  853. 'end_frame': event['end_frame'],
  854. 'type': 'shake'
  855. })
  856. # 添加到不舒适事件表
  857. new_row = pd.DataFrame([{
  858. 'start_time': event['start_time'],
  859. 'end_time': event['end_time'],
  860. 'start_frame': event['start_frame'],
  861. 'end_frame': event['end_frame'],
  862. 'type': 'shake'
  863. }])
  864. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  865. return events
  866. def calculate_cadence_count(self):
  867. """计算顿挫指标并检测事件"""
  868. # 原有的计算逻辑
  869. cadence_events = self._cadence_detector()
  870. self.generate_metric_chart('cadence')
  871. # 返回事件次数
  872. return len(cadence_events)
    def _cadence_detector(self):
        """Detect cadence (surging) events from changes in the 'cadence' signal.

        Change points of the per-sample 'cadence' value are extracted, and
        consecutive change points closer than 1 s are clustered into a single
        event.  Each cluster is appended to ``self.discomfort_df`` and counted
        in ``self.cadence_count``.

        Returns:
            list[list]: [start_time, end_time] per detected event (empty when
            required columns are missing or no change is seen).
        """
        df = self.ego_df.copy()
        # Required input columns; prefer the body-frame acceleration channel.
        required_fields = ['simTime', 'simFrame', 'cadence']
        acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
        if not all(field in df.columns for field in required_fields + [acc_field]):
            missing_fields = [field for field in required_fields + [acc_field] if field not in df.columns]
            self.logger.warning(f"顿挫检测缺少必要字段: {missing_fields},无法进行检测")
            self.cadence_count = 0
            return []
        # Keep only the needed columns under a uniform acceleration name.
        df = df[['simTime', 'simFrame', acc_field, 'cadence']].copy()
        df.rename(columns={acc_field: 'acc_used'}, inplace=True)
        # Drop invalid cadence samples, then keep only rows where the
        # cadence value actually changed (the change points).
        df = df[df['cadence'].notna()].copy()
        df['cadence_diff'] = df['cadence'].diff()
        df.dropna(subset=['cadence_diff'], inplace=True)
        df = df[df['cadence_diff'] != 0]
        if df.empty:
            self.logger.info("未检测到明显cadence变化,未触发顿挫事件")
            self.cadence_count = 0
            return []
        # Times/frames of the change points, to be clustered below.
        time_list = df['simTime'].tolist()
        frame_list = df['simFrame'].tolist()
        # Cluster change points whose gap is at most TIME_RANGE seconds.
        TIME_RANGE = 1.0  # seconds
        grouped_times, grouped_frames = [], []
        temp_times, temp_frames = [], []
        for i in range(len(time_list)):
            if not temp_times or (time_list[i] - temp_times[-1] <= TIME_RANGE):
                temp_times.append(time_list[i])
                temp_frames.append(frame_list[i])
            else:
                if len(temp_times) >= 1:
                    grouped_times.append(temp_times)
                    grouped_frames.append(temp_frames)
                temp_times, temp_frames = [time_list[i]], [frame_list[i]]
        # Flush the last open cluster.
        if len(temp_times) >= 1:
            grouped_times.append(temp_times)
            grouped_frames.append(temp_frames)
        # One [start, end] range per cluster.
        cadence_time_ranges = [[g[0], g[-1]] for g in grouped_times]
        cadence_frame_ranges = [[g[0], g[-1]] for g in grouped_frames]
        # Append each event to the discomfort table.
        for i in range(len(cadence_time_ranges)):
            start_time = cadence_time_ranges[i][0]
            end_time = cadence_time_ranges[i][1]
            start_frame = cadence_frame_ranges[i][0]
            end_frame = cadence_frame_ranges[i][1]
            new_row = pd.DataFrame([{
                'start_time': start_time,
                'end_time': end_time,
                'start_frame': start_frame,
                'end_frame': end_frame,
                'type': 'cadence'
            }])
            self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Per-cluster strength (mean |acc|) and frequency, for quantification.
        # NOTE(review): freq_list is filled but never used afterwards.
        stre_list, freq_list = [], []
        for group_times in grouped_times:
            g_df = df[df['simTime'].isin(group_times)]
            strength = g_df['acc_used'].abs().mean()
            stre_list.append(strength)
            if len(group_times) > 1:
                t_delta = group_times[-1] - group_times[0]
                freq = len(group_times) / t_delta if t_delta > 0 else 0
                freq_list.append(freq)
        # Persist statistics and log a summary.
        self.cadence_count = len(cadence_time_ranges)
        cadence_strength = np.mean(stre_list) if stre_list else 0
        self.logger.info(f"检测到 {self.cadence_count} 次顿挫事件,平均强度:{cadence_strength:.2f}")
        # Record which acceleration channel was used.
        self.logger.info(f"顿挫检测使用的加速度字段: {acc_field}")
        return cadence_time_ranges
  951. def calculate_slam_brake_count(self):
  952. """计算急刹车指标并检测事件"""
  953. # 原有的计算逻辑
  954. self._slam_brake_detector()
  955. # 返回事件次数
  956. # 生成急刹车指标图表
  957. self.generate_metric_chart('slamBrake')
  958. return self.slam_brake_count
    def _slam_brake_detector(self):
        """Detect hard-braking events from the binary 'slam_brake' flag.

        Scans for contiguous runs of ``slam_brake == 1`` lasting at least
        0.5 s; each valid run is recorded in ``self.discomfort_df`` and the
        total is stored in ``self.slam_brake_count``.

        NOTE(review): indices yielded by ``iterrows`` are used directly with
        ``df.loc`` range slicing and ``end_idx = len(df) - 1``, which assumes
        ``self.ego_df`` carries a monotonically increasing 0-based integer
        index — confirm upstream resets the index.
        """
        df = self.ego_df.copy()
        # Bail out when the flag column is absent.
        if 'slam_brake' not in df.columns:
            self.logger.warning("缺少计算急刹车指标所需的数据列")
            return
        # Minimum duration (seconds) for a run to count as an event.
        min_duration = 0.5
        # State machine over the rows: track the start of the current run.
        slam_brake_events = []
        in_event = False
        start_idx = 0
        for i, row in df.iterrows():
            if row['slam_brake'] == 1 and not in_event:
                # A new braking run starts here.
                in_event = True
                start_idx = i
            elif row['slam_brake'] == 0 and in_event:
                # The run ended on the previous row.
                in_event = False
                end_idx = i - 1
                # Duration of the run.
                start_time = df.loc[start_idx, 'simTime']
                end_time = df.loc[end_idx, 'simTime']
                duration = end_time - start_time
                # Keep only runs that last long enough.
                if duration >= min_duration:
                    # Prefer the body-frame longitudinal acceleration channel.
                    acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
                    slam_brake_events.append({
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'duration': duration,
                        # Strongest deceleration within the run.
                        'min_lon_acc': df.loc[start_idx:end_idx, acc_field].min()
                    })
                    # Mirror the event into the discomfort table.
                    new_row = pd.DataFrame([{
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'type': 'slam_brake'
                    }])
                    self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Close a run still open at the end of the recording.
        if in_event:
            end_idx = len(df) - 1
            start_time = df.loc[start_idx, 'simTime']
            end_time = df.loc[end_idx, 'simTime']
            duration = end_time - start_time
            if duration >= min_duration:
                # Prefer the body-frame longitudinal acceleration channel.
                acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
                slam_brake_events.append({
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'duration': duration,
                    'min_lon_acc': df.loc[start_idx:end_idx, acc_field].min()
                })
                # Mirror the event into the discomfort table.
                new_row = pd.DataFrame([{
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'type': 'slam_brake'
                }])
                self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Persist the count.
        self.slam_brake_count = len(slam_brake_events)
        self.logger.info(f"检测到 {self.slam_brake_count} 次急刹车事件")
  1036. def calculate_slam_accel_count(self):
  1037. """计算急加速指标并检测事件"""
  1038. # 原有的计算逻辑
  1039. self._slam_accel_detector()
  1040. self.generate_metric_chart('slamaccelerate')
  1041. # 返回事件次数
  1042. return self.slam_accel_count
    def _slam_accel_detector(self):
        """Detect harsh-acceleration events from the binary 'slam_accel' flag.

        Scans for contiguous runs of ``slam_accel == 1`` lasting at least
        0.5 s; each valid run is recorded in ``self.discomfort_df`` and the
        total is stored in ``self.slam_accel_count``.

        NOTE(review): same index assumption as ``_slam_brake_detector`` —
        ``iterrows`` indices are used with ``df.loc`` range slicing, so a
        monotonically increasing 0-based integer index is expected.
        """
        df = self.ego_df.copy()
        # Bail out when the flag column is absent.
        if 'slam_accel' not in df.columns:
            self.logger.warning("缺少计算急加速指标所需的数据列")
            return
        # Minimum duration (seconds) for a run to count as an event.
        min_duration = 0.5
        # State machine over the rows: track the start of the current run.
        slam_accel_events = []
        in_event = False
        start_idx = 0
        for i, row in df.iterrows():
            if row['slam_accel'] == 1 and not in_event:
                # A new acceleration run starts here.
                in_event = True
                start_idx = i
            elif row['slam_accel'] == 0 and in_event:
                # The run ended on the previous row.
                in_event = False
                end_idx = i - 1
                # Duration of the run.
                start_time = df.loc[start_idx, 'simTime']
                end_time = df.loc[end_idx, 'simTime']
                duration = end_time - start_time
                # Keep only runs that last long enough.
                if duration >= min_duration:
                    # Prefer the body-frame longitudinal acceleration channel.
                    acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
                    slam_accel_events.append({
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'duration': duration,
                        # Strongest acceleration within the run.
                        'max_lon_acc': df.loc[start_idx:end_idx, acc_field].max()
                    })
                    # Mirror the event into the discomfort table.
                    new_row = pd.DataFrame([{
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'type': 'slam_accel'
                    }])
                    self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Close a run still open at the end of the recording.
        if in_event:
            end_idx = len(df) - 1
            start_time = df.loc[start_idx, 'simTime']
            end_time = df.loc[end_idx, 'simTime']
            duration = end_time - start_time
            if duration >= min_duration:
                # Prefer the body-frame longitudinal acceleration channel.
                acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
                slam_accel_events.append({
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'duration': duration,
                    'max_lon_acc': df.loc[start_idx:end_idx, acc_field].max()
                })
                # Mirror the event into the discomfort table.
                new_row = pd.DataFrame([{
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'type': 'slam_accel'
                }])
                self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Persist the count.
        self.slam_accel_count = len(slam_accel_events)
        self.logger.info(f"检测到 {self.slam_accel_count} 次急加速事件")
  1120. # ========== 事件检测方法 ==========
  1121. # def _detect_low_comfort_events(self, value, event_type, threshold):
  1122. # """检测低舒适度事件"""
  1123. # if value < threshold:
  1124. # start_time = self.ego_df['simTime'].min()
  1125. # end_time = self.ego_df['simTime'].max()
  1126. # start_frame = self.ego_df['simFrame'].min()
  1127. # end_frame = self.ego_df['simFrame'].max()
  1128. # new_row = pd.DataFrame([{
  1129. # 'start_time': start_time,
  1130. # 'end_time': end_time,
  1131. # 'start_frame': start_frame,
  1132. # 'end_frame': end_frame,
  1133. # 'type': event_type
  1134. # }])
  1135. # self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1136. # self.logger.info(f"检测到{event_type}低值事件: {value:.2f} < {threshold}")
  1137. # def _detect_high_probability_events(self, value, event_type, threshold):
  1138. # """检测高概率事件"""
  1139. # if value > threshold:
  1140. # start_time = self.ego_df['simTime'].min()
  1141. # end_time = self.ego_df['simTime'].max()
  1142. # start_frame = self.ego_df['simFrame'].min()
  1143. # end_frame = self.ego_df['simFrame'].max()
  1144. # new_row = pd.DataFrame([{
  1145. # 'start_time': start_time,
  1146. # 'end_time': end_time,
  1147. # 'start_frame': start_frame,
  1148. # 'end_frame': end_frame,
  1149. # 'type': event_type
  1150. # }])
  1151. # self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1152. # self.logger.info(f"检测到{event_type}高值事件: {value:.2f} > {threshold}")
  1153. def _detect_threshold_events(self, value, event_type, min_threshold=None, max_threshold=None):
  1154. """
  1155. 检测阈值事件 - 统一处理低值和高值事件
  1156. 参数:
  1157. value: 指标计算值
  1158. event_type: 事件类型名称
  1159. min_threshold: 最小值阈值(低于此值触发事件)
  1160. max_threshold: 最大值阈值(高于此值触发事件)
  1161. """
  1162. trigger = False
  1163. reason = ""
  1164. # 检查是否低于最小值阈值
  1165. if min_threshold is not None and value < min_threshold:
  1166. trigger = True
  1167. reason = f"{value:.2f} < {min_threshold:.2f} (min threshold)"
  1168. # 检查是否高于最大值阈值
  1169. if max_threshold is not None and value > max_threshold:
  1170. trigger = True
  1171. reason = f"{value:.2f} > {max_threshold:.2f} (max threshold)"
  1172. # 如果触发事件,记录到不舒适事件表
  1173. if trigger:
  1174. start_time = self.ego_df['simTime'].min()
  1175. end_time = self.ego_df['simTime'].max()
  1176. start_frame = self.ego_df['simFrame'].min()
  1177. end_frame = self.ego_df['simFrame'].max()
  1178. new_row = pd.DataFrame([{
  1179. 'start_time': start_time,
  1180. 'end_time': end_time,
  1181. 'start_frame': start_frame,
  1182. 'end_frame': end_frame,
  1183. 'type': event_type
  1184. }])
  1185. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1186. self.logger.info(f"检测到{event_type}事件: {reason}")
  1187. def _detect_high_vdv_events(self, value, threshold):
  1188. """检测高VDV事件"""
  1189. if value > threshold:
  1190. start_time = self.ego_df['simTime'].min()
  1191. end_time = self.ego_df['simTime'].max()
  1192. start_frame = self.ego_df['simFrame'].min()
  1193. end_frame = self.ego_df['simFrame'].max()
  1194. new_row = pd.DataFrame([{
  1195. 'start_time': start_time,
  1196. 'end_time': end_time,
  1197. 'start_frame': start_frame,
  1198. 'end_frame': end_frame,
  1199. 'type': 'vdv'
  1200. }])
  1201. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1202. self.logger.info(f"检测到高VDV事件: {value:.4f} > {threshold}")
  1203. def _detect_high_ava_vav_events(self, value, threshold):
  1204. """检测高AVA/VAV事件"""
  1205. if value > threshold:
  1206. start_time = self.ego_df['simTime'].min()
  1207. end_time = self.ego_df['simTime'].max()
  1208. start_frame = self.ego_df['simFrame'].min()
  1209. end_frame = self.ego_df['simFrame'].max()
  1210. new_row = pd.DataFrame([{
  1211. 'start_time': start_time,
  1212. 'end_time': end_time,
  1213. 'start_frame': start_frame,
  1214. 'end_frame': end_frame,
  1215. 'type': 'ava_vav'
  1216. }])
  1217. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1218. self.logger.info(f"检测到高AVA/VAV事件: {value:.4f} > {threshold}")
  1219. def _detect_high_msdv_events(self, value, threshold):
  1220. """检测高MSDV事件"""
  1221. if value > threshold:
  1222. start_time = self.ego_df['simTime'].min()
  1223. end_time = self.ego_df['simTime'].max()
  1224. start_frame = self.ego_df['simFrame'].min()
  1225. end_frame = self.ego_df['simFrame'].max()
  1226. new_row = pd.DataFrame([{
  1227. 'start_time': start_time,
  1228. 'end_time': end_time,
  1229. 'start_frame': start_frame,
  1230. 'end_frame': end_frame,
  1231. 'type': 'msdv'
  1232. }])
  1233. self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
  1234. self.logger.info(f"检测到高MSDV事件: {value:.4f} > {threshold}")
    def _detect_comfort_events_from_time_series(self, series, event_type, threshold, min_duration=0.5):
        """Detect threshold-exceeding events in a per-sample comfort series.

        Args:
            series: per-sample values aligned with ``self.ego_df`` (e.g. an
                acceleration or a comfort metric).
            event_type: label written to the discomfort table.
            threshold: a sample counts as exceeding when strictly above this.
            min_duration: minimum event length in seconds.

        Returns:
            list[dict]: one record per event (times, frames, duration, max).

        NOTE(review): indices from ``iterrows`` are used with ``.loc`` range
        slicing and ``end_idx = len(df) - 1``, which assumes a monotonically
        increasing 0-based integer index shared by ``series`` and
        ``self.ego_df`` — confirm upstream resets the index.
        """
        # Flag samples above the threshold.
        df = self.ego_df.copy()
        df['exceed'] = (series > threshold).astype(int)
        # State machine over the rows: track the start of the current run.
        events = []
        in_event = False
        start_idx = 0
        for i, row in df.iterrows():
            if row['exceed'] == 1 and not in_event:
                # A new exceeding run starts here.
                in_event = True
                start_idx = i
            elif row['exceed'] == 0 and in_event:
                # The run ended on the previous row.
                in_event = False
                end_idx = i - 1
                # Duration of the run.
                start_time = df.loc[start_idx, 'simTime']
                end_time = df.loc[end_idx, 'simTime']
                duration = end_time - start_time
                # Keep only runs that last long enough.
                if duration >= min_duration:
                    events.append({
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'duration': duration,
                        # Peak of the series within the run.
                        'max_value': series.loc[start_idx:end_idx].max()
                    })
                    # Mirror the event into the discomfort table.
                    new_row = pd.DataFrame([{
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': df.loc[start_idx, 'simFrame'],
                        'end_frame': df.loc[end_idx, 'simFrame'],
                        'type': event_type
                    }])
                    self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Close a run still open at the end of the recording.
        if in_event:
            end_idx = len(df) - 1
            start_time = df.loc[start_idx, 'simTime']
            end_time = df.loc[end_idx, 'simTime']
            duration = end_time - start_time
            if duration >= min_duration:
                events.append({
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'duration': duration,
                    'max_value': series.loc[start_idx:end_idx].max()
                })
                # Mirror the event into the discomfort table.
                new_row = pd.DataFrame([{
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': df.loc[start_idx, 'simFrame'],
                    'end_frame': df.loc[end_idx, 'simFrame'],
                    'type': event_type
                }])
                self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        return events
  1308. # ========== 辅助方法 ==========
  1309. def _calculate_psd(self, signal, fs):
  1310. """计算信号的功率谱密度
  1311. Args:
  1312. signal: 输入信号
  1313. fs: 采样频率
  1314. Returns:
  1315. tuple: 频率和对应的功率谱密度
  1316. """
  1317. # 使用Welch方法计算PSD
  1318. from scipy import signal as sp_signal
  1319. f, psd = sp_signal.welch(signal, fs, nperseg=min(256, len(signal) // 2))
  1320. return f, psd
  1321. def _cal_cur_ego_path(self, row):
  1322. """计算车辆轨迹曲率"""
  1323. try:
  1324. divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
  1325. if not divide:
  1326. res = None
  1327. else:
  1328. res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
  1329. except:
  1330. res = None
  1331. return res
  1332. def _peak_valley_determination(self, df):
  1333. """确定角速度的峰谷"""
  1334. peaks, _ = scipy.signal.find_peaks(
  1335. df['speedH'], height=2.3, distance=3,
  1336. prominence=2.3, width=1)
  1337. valleys, _ = scipy.signal.find_peaks(
  1338. -df['speedH'], height=2.3, distance=3,
  1339. prominence=2.3, width=1)
  1340. return sorted(list(peaks) + list(valleys))
  1341. def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=4.6):
  1342. """判断峰谷是否满足蛇行条件"""
  1343. t_diff = p_curr[0] - p_last[0]
  1344. v_diff = abs(p_curr[1] - p_last[1])
  1345. s = p_curr[1] * p_last[1]
  1346. if t_diff < tw and v_diff > avg and s < 0:
  1347. if [p_last[0], p_curr[0]] not in self.zigzag_time_list:
  1348. self.zigzag_time_list.append([p_last[0], p_curr[0]])
  1349. return True
  1350. return False
  1351. def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
  1352. """处理顿挫数据
  1353. 使用车辆坐标系下的纵向加速度判断顿挫
  1354. Args:
  1355. lon_acc: 纵向加速度(车辆坐标系)
  1356. ip_acc: 加速阈值
  1357. ip_dec: 减速阈值
  1358. Returns:
  1359. int/float: nan表示不符合顿挫条件,1表示加速顿挫,-1表示减速顿挫,0表示正常
  1360. """
  1361. if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
  1362. return np.nan
  1363. elif abs(lon_acc) == 0:
  1364. return 0
  1365. elif lon_acc > 0 and lon_acc < ip_acc:
  1366. return 1
  1367. elif lon_acc < 0 and lon_acc > ip_dec:
  1368. return -1
  1369. else:
  1370. return 0
    @peak_valley_decorator
    def _zigzag_detector(self, p_curr, p_last, flag=True):
        """Record one zigzag (snaking) event for a pair of yaw-rate extrema.

        Driven by ``peak_valley_decorator``, which supplies consecutive
        extrema pairs; ``flag`` indicates whether the pair qualified.

        Args:
            p_curr: (time, value) of the current extremum.
            p_last: (time, value) of the previous extremum.
            flag: qualification result supplied by the decorator.
        """
        if flag:
            # Event interval in time and in frames.
            start_time = p_last[0]
            end_time = p_curr[0]
            start_frame = get_frame_with_time(self.time_list, self.frame_list, start_time)
            end_frame = get_frame_with_time(self.time_list, self.frame_list, end_time)
            duration = end_time - start_time
            # Ignore very short oscillations.
            min_duration = 0.5  # seconds
            if duration >= min_duration:
                self.zigzag_count += 1
                # Mirror the event into the discomfort table.
                new_row = pd.DataFrame([{
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': start_frame,
                    'end_frame': end_frame,
                    'type': 'zigzag'
                }])
                self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
                # NOTE(review): a dict is appended here while
                # _peak_valley_judgment appends [start, end] pairs to the same
                # list — consumers must accept both shapes.
                self.zigzag_time_list.append({
                    'start_time': start_time,
                    'end_time': end_time,
                    'start_frame': start_frame,
                    'end_frame': end_frame,
                    'duration': duration
                })
        else:
            # No-op kept to make the non-qualifying branch explicit.
            self.zigzag_count += 0
  1406. @peak_valley_decorator
  1407. def _cal_zigzag_strength(self, p_curr, p_last, flag=True):
  1408. """计算蛇行强度"""
  1409. if flag:
  1410. v_diff = abs(p_curr[1] - p_last[1])
  1411. t_diff = p_curr[0] - p_last[0]
  1412. if t_diff > 0:
  1413. self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
  1414. else:
  1415. self.zigzag_stre_list = []
  1416. def _get_zigzag_times(self):
  1417. """获取所有蛇行时间点"""
  1418. all_times = []
  1419. for time_range in self.zigzag_time_list:
  1420. start, end = time_range
  1421. # 获取这个时间范围内的所有时间点
  1422. times_in_range = self.ego_df[(self.ego_df['simTime'] >= start) &
  1423. (self.ego_df['simTime'] <= end)]['simTime'].tolist()
  1424. all_times.extend(times_in_range)
  1425. return all_times
    def _shake_detector(self, T_diff=0.5):
        """Detect vehicle shake events (vectorised implementation).

        A shake is flagged when the lateral acceleration exceeds a
        speed-dependent threshold AND either its rate of change or the
        short-term yaw-rate variability is also elevated, sustained for at
        least ``T_diff`` seconds.

        Args:
            T_diff: minimum event duration in seconds.

        Returns:
            list[dict]: detected shake events (also stored in
            ``self.shake_events``); empty list when inputs are missing.
        """
        df = self.ego_df.copy()
        # Required inputs: ENU lateral acceleration and heading angle.
        if 'lat_acc' not in df.columns or 'posH' not in df.columns:
            self.logger.warning("缺少计算晃动指标所需的数据列")
            return []
        # Rotate ENU accelerations into the body frame
        # (x towards the nose, y to the left, z up).
        df['posH_rad'] = np.radians(df['posH'])
        # Lateral acceleration expressed in the body frame.
        df['lat_acc_body'] = df['lat_acc'] * np.cos(df['posH_rad']) - df['lon_acc'] * np.sin(df['posH_rad'])
        # speedH is already a body-frame yaw rate; no rotation needed.
        df['speedH_body'] = df['speedH']
        # 1. Rate of change of the body-frame lateral acceleration.
        df['lat_acc_rate'] = df['lat_acc_body'].diff() / df['simTime'].diff()
        # 2. Rate of change of the body-frame yaw rate.
        df['speedH_rate'] = df['speedH_body'].diff() / df['simTime'].diff()
        # 3. Short-term yaw-rate variability over a 10-frame window.
        window_size = 10  # frames
        df['speedH_std'] = df['speedH'].rolling(window=window_size, min_periods=2).std()
        # 4. Speed-dependent thresholds.
        v0 = 20 * 5 / 18  # ≈5.56 m/s
        k = 0.008 * 3.6  # =0.0288 per m/s
        df['lat_acc_threshold'] = df['v'].apply(
            lambda speed: max(
                1.0,  # floor 1.0 m/s²
                min(
                    1.8,  # cap 1.8 m/s²
                    1.8 - k * (speed - v0)  # linear falloff with speed
                )
            )
        )
        df['speedH_threshold'] = df['v'].apply(
            lambda speed: max(1.5, min(3.0, 2.0 * (1 + (speed - 20) / 60)))
        )
        # 5. Shake condition.
        # Condition A: lateral acceleration above its dynamic threshold.
        # NOTE(review): the raw ENU 'lat_acc' is used here rather than the
        # 'lat_acc_body' column computed above — confirm which is intended.
        condition_A = df['lat_acc'].abs() > df['lat_acc_threshold']
        # Condition B: lateral-acceleration rate above a fixed threshold.
        lat_acc_rate_threshold = 0.5  # m/s³
        condition_B = df['lat_acc_rate'].abs() > lat_acc_rate_threshold
        # Condition C: clearly elevated yaw-rate variability.
        condition_C = (df['speedH_std'] > df['speedH_threshold'])
        # Overall: A and (B or C).
        shake_condition = condition_A & (condition_B | condition_C)
        # 6. Group contiguous flagged samples into candidate events.
        event_groups = (shake_condition != shake_condition.shift()).cumsum()
        shake_events = []
        for _, group in df[shake_condition].groupby(event_groups):
            if len(group) >= 2:  # at least 2 frames to count as a shake
                start_time = group['simTime'].iloc[0]
                end_time = group['simTime'].iloc[-1]
                duration = end_time - start_time
                if duration >= T_diff:  # keep only long-enough events
                    shake_events.append({
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': group['simFrame'].iloc[0],
                        'end_frame': group['simFrame'].iloc[-1],
                        'duration': duration,
                        'max_lat_acc': group['lat_acc'].abs().max()
                    })
                    # Mirror the event into the discomfort table.
                    new_row = pd.DataFrame([{
                        'start_time': start_time,
                        'end_time': end_time,
                        'start_frame': group['simFrame'].iloc[0],
                        'end_frame': group['simFrame'].iloc[-1],
                        'type': 'shake'
                    }])
                    self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
        # Persist the count.
        self.shake_count = len(shake_events)
        self.logger.info(f"检测到 {self.shake_count} 次晃动事件")
        # Keep the derived columns available to later stages.
        self.ego_df = df.copy()
        # Save the detected events for _detect_shake_events.
        self.shake_events = shake_events
        return shake_events
  1508. class ComfortManager:
  1509. """舒适性指标计算主类"""
  1510. def __init__(self, data_processed):
  1511. self.data = data_processed
  1512. self.logger = LogManager().get_logger()
  1513. self.registry = ComfortRegistry(self.data)
  1514. def report_statistic(self):
  1515. """生成舒适性评分报告"""
  1516. comfort_result = self.registry.batch_execute()
  1517. return comfort_result