- """
- 核心功能
- 坐标系处理:
- 支持东北天坐标系(ENU)到车身坐标系的转换
- 使用航向角(posH)进行坐标系旋转
- 车辆坐标系定义:x前,y左,z上
- 舒适性指标计算:
- 运动舒适度指数(motionComfortIndex)
- 乘坐质量评分(rideQualityScore)
- 晕车概率(motionSickness)
- 振动剂量值(VDV)
- 多维度综合加权加速度(ava_vav)
- 晕动剂量值(MSDV)
- 蛇行指标(zigzag)
- 晃动指标(shake)
- 顿挫指标(cadence)
- 急刹车指标(slamBrake)
- 急加速指标(slamAccelerate)
- 事件检测:
- 所有指标均支持事件检测
- 当指标超过阈值时记录事件
- 记录事件起止时间、帧号和类型
- 所有事件保存到discomfort_df数据框中
- 关键技术
- ISO 2631-1:1997标准:
- 实现Wk、Wd、Wf三种频率加权滤波器
- 用于计算VDV、MSDV等指标
- 车辆动力学分析:
- 基于加速度变化率(Jerk)评估舒适性
- 考虑三轴加速度和角速度的综合影响
- 事件检测算法:
- 峰值检测(蛇行、晃动)
- 聚类分析(顿挫)
- 连续事件检测(急刹车、急加速)
- 数据结构
- discomfort_df:
- 存储所有检测到的不舒适事件
- 包含字段:start_time, end_time, start_frame, end_frame, type
- calculated_value:
- 存储各指标的计算结果
- 便于后续报告生成
- 日志系统
- 详细日志记录:
- 记录指标计算过程
- 记录事件检测结果
- 记录异常情况
- 扩展性
- 模块化设计:
- 每个指标独立计算
- 方便添加新指标
- 配置驱动:
- 通过配置文件定义需要计算的指标
- 支持动态扩展
- """
- # !/usr/bin/env python
- # -*- coding: utf-8 -*-
- ##################################################################
- #
- # Copyright (c) 2023 CICV, Inc. All Rights Reserved
- #
- ##################################################################
- """
- @Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
- @Data: 2023/06/25
- @Last Modified: 2025/04/25
- @Summary: Comfort metrics
- """
- import scipy.signal
- import pandas as pd
- import numpy as np
- import os
- from pathlib import Path
- from typing import Dict, List, Any, Optional, Callable, Union, Tuple
- from modules.lib.score import Score
- from modules.lib.common import get_interpolation, get_frame_with_time
- from modules.lib import data_process
- from modules.lib.log_manager import LogManager
- from modules.lib.chart_generator import generate_comfort_chart_data
- # Channels consumed from the processed ego data, including the vehicle-frame speed and acceleration fields
- COMFORT_INFO = [
- "simTime",
- "simFrame",
- "speedX",
- "speedY",
- "accelX",
- "accelY",
- "curvHor",
- "lightMask",
- "v",
- "lat_acc",
- "lon_acc",
- "time_diff",
- "lon_acc_diff",
- "lon_acc_roc",
- "speedH",
- "accelH",
- "posH",
- "lon_acc_vehicle", # 车辆坐标系下的纵向加速度
- "lat_acc_vehicle", # 车辆坐标系下的横向加速度
- "acc_z_vehicle", # 车辆坐标系下的垂向加速度
- "lon_v_vehicle", # 车辆坐标系下的纵向速度
- "lat_v_vehicle", # 车辆坐标系下的横向速度
- "vel_z_vehicle" # 车辆坐标系下的垂向速度
- ]
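- # Minimal input sketch (illustrative, not executed here): ComfortCalculator slices
- # data_processed.ego_data down to exactly these COMFORT_INFO columns, so a synthetic
- # frame for unit testing could look like the following; the 100 Hz timeline is an
- # assumption, not something this module enforces.
- #
- #     n = 500
- #     ego = pd.DataFrame({col: np.zeros(n) for col in COMFORT_INFO})
- #     ego['simTime'] = np.arange(n) * 0.01
- #     ego['simFrame'] = np.arange(n)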
- # ----------------------
- # 独立指标计算函数
- # ----------------------
- # 更新指标计算函数,返回事件次数而非指标值
- def calculate_motioncomfortindex(data_processed) -> dict:
- """计算运动舒适度指数事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算舒适度指数并检测事件
- comfort.calculate_motion_comfort_index()
- # 统计事件类型为'motionComfortIndex'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'motionComfortIndex'])
- return {"motionComfortIndex": float(count)}
- def calculate_ridequalityscore(data_processed) -> dict:
- """计算乘坐质量评分事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算乘坐质量评分并检测事件
- comfort.calculate_ride_quality_score()
- # 统计事件类型为'rideQualityScore'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'rideQualityScore'])
- return {"rideQualityScore": float(count)}
- def calculate_motionsickness(data_processed) -> dict:
- """计算晕车概率事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算晕车概率并检测事件
- comfort.calculate_motion_sickness_probability()
- # 统计事件类型为'motionSickness'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'motionSickness'])
- return {"motionSickness": float(count)}
- def calculate_vdv(data_processed) -> dict:
- """计算振动剂量值(VDV)事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算VDV并检测事件
- comfort.calculate_vdv()
- # 统计事件类型为'vdv'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'vdv'])
- return {"vdv": float(count)}
- def calculate_ava_vav(data_processed) -> dict:
- """计算多维度综合加权加速度事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算AVA/VAV并检测事件
- comfort.calculate_ava_vav()
- # 统计事件类型为'ava_vav'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'ava_vav'])
- return {"ava_vav": float(count)}
- def calculate_msdv(data_processed) -> dict:
- """计算晕动剂量值(MSDV)事件次数"""
- comfort = ComfortCalculator(data_processed)
- # 计算MSDV并检测事件
- comfort.calculate_msdv()
- # 统计事件类型为'msdv'的事件次数
- count = len(comfort.discomfort_df[comfort.discomfort_df['type'] == 'msdv'])
- return {"msdv": float(count)}
- def calculate_zigzag(data_processed) -> dict:
- """计算蛇行指标"""
- comfort = ComfortCalculator(data_processed)
- zigzag_count = comfort.calculate_zigzag_count()
- return {"zigzag": float(zigzag_count)}
- def calculate_shake(data_processed) -> dict:
- """计算晃动指标"""
- comfort = ComfortCalculator(data_processed)
- shake_count = comfort.calculate_shake_count()
- return {"shake": float(shake_count)}
- def calculate_cadence(data_processed) -> dict:
- """计算顿挫指标"""
- comfort = ComfortCalculator(data_processed)
- cadence_count = comfort.calculate_cadence_count()
- return {"cadence": float(cadence_count)}
- def calculate_slambrake(data_processed) -> dict:
- """计算急刹车指标"""
- comfort = ComfortCalculator(data_processed)
- slam_brake_count = comfort.calculate_slam_brake_count()
- return {"slamBrake": float(slam_brake_count)}
- def calculate_slamaccelerate(data_processed) -> dict:
- """计算急加速指标"""
- comfort = ComfortCalculator(data_processed)
- slam_accel_count = comfort.calculate_slam_accel_count()
- return {"slamAccelerate": float(slam_accel_count)}
- # 装饰器保持不变
- def peak_valley_decorator(method):
- def wrapper(self, *args, **kwargs):
- peak_valley = self._peak_valley_determination(self.df)
- pv_list = self.df.loc[peak_valley, ['simTime', 'speedH']].values.tolist()
- if len(pv_list) != 0:
- flag = True
- p_last = pv_list[0]
- for i in range(1, len(pv_list)):
- p_curr = pv_list[i]
- if self._peak_valley_judgment(p_last, p_curr):
- # method(self, p_curr, p_last)
- method(self, p_curr, p_last, flag, *args, **kwargs)
- else:
- p_last = p_curr
- return method
- else:
- flag = False
- p_curr = [0, 0]
- p_last = [0, 0]
- method(self, p_curr, p_last, flag, *args, **kwargs)
- return method
- return wrapper
- class ComfortRegistry:
- """舒适性指标注册器"""
- def __init__(self, data_processed):
- self.logger = LogManager().get_logger() # 获取全局日志实例
- self.data = data_processed
- self.comfort_config = data_processed.comfort_config["comfort"]
- self.metrics = self._extract_metrics(self.comfort_config)
- self._registry = self._build_registry()
- self.output_dir = None # 图表数据输出目录
- def _extract_metrics(self, config_node: dict) -> list:
- """DFS遍历提取指标"""
- metrics = []
- def _recurse(node):
- if isinstance(node, dict):
- if 'name' in node and not any(isinstance(v, dict) for v in node.values()):
- metrics.append(node['name'])
- for v in node.values():
- _recurse(v)
- _recurse(config_node)
- self.logger.info(f'评比的舒适性指标列表:{metrics}')
- return metrics
- def _build_registry(self) -> dict:
- """自动注册指标函数"""
- registry = {}
- for metric_name in self.metrics:
- func_name = f"calculate_{metric_name.lower()}"
- try:
- registry[metric_name] = globals()[func_name]
- except KeyError:
- self.logger.error(f"未实现指标函数: {func_name}")
- return registry
- def batch_execute(self) -> dict:
- """批量执行指标计算"""
- results = {}
- for name, func in self._registry.items():
- try:
- result = func(self.data)
- results.update(result)
- # 新增:将每个指标的结果写入日志
- self.logger.info(f'舒适性指标[{name}]计算结果: {result}')
- except Exception as e:
- self.logger.error(f"{name} 执行失败: {str(e)}", exc_info=True)
- results[name] = None
- self.logger.info(f'舒适性指标计算结果:{results}')
- return results
- class ComfortCalculator:
- """舒适性指标计算类 - 提供核心计算功能"""
- def generate_metric_chart(self, metric_name: str) -> None:
- """
- 生成指标图表
- Args:
- metric_name: 指标名称
- """
- # 设置输出目录
- if not hasattr(self, 'output_dir') or not self.output_dir:
- self.output_dir = os.path.join(os.getcwd(), 'data')
- os.makedirs(self.output_dir, exist_ok=True)
- # 调用chart_generator中的函数生成图表
- chart_path = generate_comfort_chart_data(self, metric_name, self.output_dir)
- if chart_path:
- self.logger.info(f"{metric_name}图表已生成: {chart_path}")
- def __init__(self, data_processed):
- self.data_processed = data_processed
- self.logger = LogManager().get_logger()
- self.data = data_processed.ego_data
- self.ego_df = pd.DataFrame()
- self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
- # 统计指标
- self.calculated_value = {
- 'zigzag': 0,
- 'shake': 0,
- 'cadence': 0,
- 'slamBrake': 0,
- 'slamAccelerate': 0,
- 'ava_vav': 0, # 添加新指标的默认值
- 'msdv': 0, # 添加MSDV指标的默认值
- 'motionSickness': 0, # 添加晕车概率指标的默认值
- 'vdv': 0,
- 'motionComfortIndex': 0, # 新增指标
- 'rideQualityScore': 0 # 新增指标
- }
- self.time_list = self.data['simTime'].values.tolist()
- self.frame_list = self.data['simFrame'].values.tolist()
- self.zigzag_count = 0
- self.shake_count = 0
- self.cadence_count = 0
- self.slam_brake_count = 0
- self.slam_accel_count = 0
- self.zigzag_time_list = []
- self.zigzag_stre_list = []
- self.shake_events = [] # 用于存储晃动事件数据
- self._initialize_data()
- def _initialize_data(self):
- """初始化数据"""
- self.ego_df = self.data[COMFORT_INFO].copy()
- self.df = self.ego_df.reset_index(drop=True)
- self._prepare_comfort_parameters()
- def _prepare_comfort_parameters(self):
- """准备舒适性计算所需参数"""
- # 计算加减速阈值 - 使用车辆坐标系下的纵向速度代替合速度
- speed_field = 'lon_v_vehicle' if 'lon_v_vehicle' in self.ego_df.columns else 'v'
- self.logger.info(f"加减速阈值计算使用的速度字段: {speed_field}")
- self.ego_df['ip_acc'] = self.ego_df[speed_field].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
- self.ego_df['ip_dec'] = self.ego_df[speed_field].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
- # 使用车辆坐标系下的纵向加速度计算急刹车和急加速
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in self.ego_df.columns else 'lon_acc'
- self.logger.info(f"急刹车和急加速检测使用的加速度字段: {acc_field}")
- # 使用车辆坐标系下的纵向加速度与阈值比较,判断急刹车和急加速
- self.ego_df['slam_brake'] = (self.ego_df[acc_field] - self.ego_df['ip_dec']).apply(
- lambda x: 1 if x < 0 else 0)
- self.ego_df['slam_accel'] = (self.ego_df[acc_field] - self.ego_df['ip_acc']).apply(
- lambda x: 1 if x > 0 else 0)
- # 确保cadence列使用车辆坐标系下的纵向加速度计算
- self.ego_df['cadence'] = self.ego_df.apply(
- lambda row: self._cadence_process_new(row[acc_field], row['ip_acc'], row['ip_dec']), axis=1)
- def _apply_frequency_weighting(self, acceleration_data, weighting_type='Wk', fs=100):
- """应用ISO 2631-1:1997标准的频率加权滤波
- 参数:
- acceleration_data: 加速度时间序列数据
- weighting_type: 加权类型,可选值包括:
- - 'Wk': 垂直方向(Z轴)加权
- - 'Wd': 水平方向(X和Y轴)加权
- - 'Wf': 运动病相关加权
- fs: 采样频率(Hz)
- 返回:
- 加权后的加速度数据
- """
- # 检查数据有效性
- if acceleration_data.empty or acceleration_data.isna().all():
- return acceleration_data
- # 根据ISO 2631-1:1997标准设计滤波器
- # 这些参数来自标准文档,用于构建数字滤波器
- if weighting_type == 'Wk': # 垂直方向(Z轴)
- # Wk滤波器参数
- f1 = 0.4
- f2 = 100.0
- f3 = 12.5
- f4 = 12.5
- Q1 = 0.63
- Q2 = 0.5
- Q3 = 0.63
- Q4 = 0.63
- K = 0.4
- elif weighting_type == 'Wd': # 水平方向(X和Y轴)
- # Wd滤波器参数
- f1 = 0.4
- f2 = 100.0
- f3 = 2.0
- f4 = 2.0
- Q1 = 0.63
- Q2 = 0.5
- Q3 = 0.63
- Q4 = 0.63
- K = 0.4
- elif weighting_type == 'Wf': # 运动病相关
- # Wf滤波器参数
- f1 = 0.08
- f2 = 0.63
- f3 = 0.25
- f4 = 0.8
- Q1 = 0.63
- Q2 = 0.86
- Q3 = 0.8
- Q4 = 0.8
- K = 1.0
- else:
- self.logger.warning(f"未知的加权类型: {weighting_type},使用原始数据")
- return acceleration_data
- # 将频率转换为角频率
- w1 = 2 * np.pi * f1
- w2 = 2 * np.pi * f2
- w3 = 2 * np.pi * f3
- w4 = 2 * np.pi * f4
- # 设计高通滤波器(s域)
- b1 = [K * w1 ** 2, 0]
- a1 = [1, w1 / Q1, w1 ** 2]
- # 设计低通滤波器(s域)
- b2 = [K, 0, 0]
- a2 = [1, w2 / Q2, w2 ** 2]
- # 设计加速度-速度转换滤波器(s域)
- b3 = [K, 0]
- a3 = [1, w3 / Q3, w3 ** 2]
- # 设计上升滤波器(s域)
- b4 = [K, 0, 0]
- a4 = [1, w4 / Q4, w4 ** 2]
- # 使用双线性变换将s域滤波器转换为z域
- b1_z, a1_z = scipy.signal.bilinear(b1, a1, fs)
- b2_z, a2_z = scipy.signal.bilinear(b2, a2, fs)
- b3_z, a3_z = scipy.signal.bilinear(b3, a3, fs)
- b4_z, a4_z = scipy.signal.bilinear(b4, a4, fs)
- # 应用滤波器链
- data_np = acceleration_data.to_numpy()
- filtered_data = scipy.signal.lfilter(b1_z, a1_z, data_np)
- filtered_data = scipy.signal.lfilter(b2_z, a2_z, filtered_data)
- filtered_data = scipy.signal.lfilter(b3_z, a3_z, filtered_data)
- filtered_data = scipy.signal.lfilter(b4_z, a4_z, filtered_data)
- return pd.Series(filtered_data, index=acceleration_data.index)
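- # Usage sketch (illustrative): the metric methods below call this filter on a body-frame
- # acceleration column and then take an RMS, e.g.
- #     a_z_wk = self._apply_frequency_weighting(df['acc_z_vehicle'], 'Wk', fs)
- #     a_z_rms = np.sqrt(np.mean(a_z_wk ** 2))
- # The four cascaded bilinear-transformed sections approximate the ISO 2631-1 band-limiting
- # and transition stages; the parameter values above are taken from the standard.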
- def calculate_motion_comfort_index(self):
- """
- 计算运动舒适度指数(Motion Comfort Index)并检测低舒适度事件
- 使用车辆坐标系中的加速度与角速度数据,评估车辆运行过程中的乘坐舒适性。
- 返回范围:0~10,10为最舒适。
- """
- # 实际计算运动舒适度指数
- comfort_index = self._calculate_motion_comfort_index()
- # 直接设置阈值
- self._detect_threshold_events(
- comfort_index,
- 'motionComfortIndex',
- min_threshold=8.0, # 硬编码阈值
- max_threshold=10.0
- )
- return comfort_index
- def _calculate_motion_comfort_index(self):
- """实际计算运动舒适度指数"""
- df = self.ego_df.copy()
- # 检查必要字段
- required_cols = ['lon_acc_vehicle', 'lat_acc_vehicle']
- if not all(col in df.columns for col in required_cols):
- self.logger.warning("缺少车辆坐标系下的加速度列,无法计算运动舒适度")
- return 8.0
- # 合成加速度模长
- df['accel_magnitude'] = np.sqrt(df['lon_acc_vehicle'] ** 2 + df['lat_acc_vehicle'] ** 2)
- if 'acc_z_vehicle' in df.columns:
- df['accel_magnitude'] = np.sqrt(df['accel_magnitude'] ** 2 + df['acc_z_vehicle'] ** 2)
- # 时间差
- df['time_diff'] = df['simTime'].diff().fillna(0.01)
- # Jerk (rate of change of acceleration); fill the leading NaN from diff() so the RMS below stays finite
- df['jerk_lon'] = (df['lon_acc_vehicle'].diff() / df['time_diff']).fillna(0)
- df['jerk_lat'] = (df['lat_acc_vehicle'].diff() / df['time_diff']).fillna(0)
- df['jerk_magnitude'] = np.sqrt(df['jerk_lon'] ** 2 + df['jerk_lat'] ** 2)
- if 'acc_z_vehicle' in df.columns:
- df['jerk_z'] = (df['acc_z_vehicle'].diff() / df['time_diff']).fillna(0)
- df['jerk_magnitude'] = np.sqrt(df['jerk_magnitude'] ** 2 + df['jerk_z'] ** 2)
- # 角速度模长(以 rollRate, pitchRate, speedH 合成)
- omega_roll = df['rollRate'] if 'rollRate' in df.columns else pd.Series(np.zeros(len(df)))
- omega_pitch = df['pitchRate'] if 'pitchRate' in df.columns else pd.Series(np.zeros(len(df)))
- omega_yaw = df['speedH']
- df['angular_velocity'] = np.sqrt(omega_roll ** 2 + omega_pitch ** 2 + omega_yaw ** 2)
- # RMS 计算
- accel_rms = np.sqrt(np.mean(df['accel_magnitude'] ** 2))
- jerk_rms = np.sqrt(np.mean(df['jerk_magnitude'] ** 2))
- angular_rms = np.sqrt(np.mean(df['angular_velocity'] ** 2))
- # 阈值与权重
- accel_threshold = 2.0 # m/s²
- jerk_threshold = 1.0 # m/s³
- angular_threshold = 0.2 # rad/s
- accel_weight = 0.5
- jerk_weight = 0.3
- angular_weight = 0.2
- # 分数计算(0-10)
- accel_score = 10 * np.exp(-max(0, accel_rms - accel_threshold) / accel_threshold)
- jerk_score = 10 * np.exp(-max(0, jerk_rms - jerk_threshold) / jerk_threshold)
- angular_score = 10 * np.exp(-max(0, angular_rms - angular_threshold) / angular_threshold)
- comfort_index = (accel_weight * accel_score +
- jerk_weight * jerk_score +
- angular_weight * angular_score)
- comfort_index = np.clip(comfort_index, 0, 10)
- self.calculated_value['motionComfortIndex'] = comfort_index
- self.logger.info(f"运动舒适度指数(Motion Comfort Index): {comfort_index:.2f}/10")
- self.logger.info(
- f"加速度RMS: {accel_rms:.4f} m/s², 加加速度RMS: {jerk_rms:.4f} m/s³, 角速度RMS: {angular_rms:.4f} rad/s")
- return comfort_index
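- # Worked example (illustrative numbers): accel_rms = 2.5 m/s², jerk_rms = 1.2 m/s³ and
- # angular_rms = 0.15 rad/s give accel_score = 10*exp(-0.5/2.0) ≈ 7.79,
- # jerk_score = 10*exp(-0.2/1.0) ≈ 8.19 and angular_score = 10 (below its threshold),
- # so comfort_index ≈ 0.5*7.79 + 0.3*8.19 + 0.2*10 ≈ 8.35.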
- def calculate_ride_quality_score(self):
- """
- 计算乘坐质量评分(Ride Quality Score)并检测低质量事件
- 基于 ISO 2631 标准,主要参考垂直振动(Z轴),用于评估车辆在颠簸路段的舒适性。
- 返回范围:0~100。
- 同时检测评分低于阈值(60)的事件
- """
- # 实际计算乘坐质量评分
- ride_quality_score = self._calculate_ride_quality_score()
- # 直接设置阈值
- self._detect_threshold_events(
- ride_quality_score,
- 'rideQualityScore',
- min_threshold=60.0, # 硬编码阈值
- max_threshold=100.0
- )
- return ride_quality_score
- def _calculate_ride_quality_score(self):
- """实际计算乘坐质量评分"""
- df = self.ego_df.copy()
- if 'acc_z_vehicle' not in df.columns:
- self.logger.warning("缺少垂直方向加速度(acc_z_vehicle),无法计算乘坐质量评分")
- return 70.0
- a_z_body = df['acc_z_vehicle']
- # 采样频率估计
- if len(df) > 1:
- time_diff = df['simTime'].diff().median()
- fs = 1.0 / time_diff if time_diff > 0 else 100
- else:
- fs = 100
- # ISO 2631 加权滤波
- a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wk', fs)
- # 计算 RMS
- a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
- # ISO 等级评分
- if a_z_rms < 0.315:
- base_score = 90
- elif a_z_rms < 0.63:
- base_score = 80
- elif a_z_rms < 1.0:
- base_score = 70
- elif a_z_rms < 1.6:
- base_score = 60
- elif a_z_rms < 2.5:
- base_score = 40
- else:
- base_score = 20
- # 评分调节因子
- duration_factor = min(1.0, 10.0 / (df['simTime'].max() - df['simTime'].min()))
- if len(a_z_weighted) > 50:
- f, psd = self._calculate_psd(a_z_weighted, fs)
- sensitive_mask = (f >= 4) & (f <= 8)
- sensitive_energy = np.sum(psd[sensitive_mask])
- total_energy = np.sum(psd)
- frequency_factor = 1.0 - 0.3 * (sensitive_energy / total_energy if total_energy > 0 else 0)
- else:
- frequency_factor = 1.0
- ride_quality_score = base_score * duration_factor * frequency_factor
- ride_quality_score = np.clip(ride_quality_score, 0, 100)
- self.calculated_value['rideQualityScore'] = ride_quality_score
- self.logger.info(f"乘坐质量评分(Ride Quality Score): {ride_quality_score:.2f}/100")
- self.logger.info(f"垂直加速度RMS: {a_z_rms:.4f} m/s²")
- # self.generate_metric_chart('rideQualityScore')
- return ride_quality_score
- def calculate_motion_sickness_probability(self):
- """计算晕车概率指标并检测高概率事件"""
- # 实际计算晕车概率
- motion_sickness_prob = self._calculate_motion_sickness_probability()
- # 直接设置阈值
- self._detect_threshold_events(
- motion_sickness_prob,
- 'motionSickness',
- min_threshold=0.0,
- max_threshold=30.0 # 硬编码阈值
- )
- # self.generate_metric_chart('motionsickness')
- return motion_sickness_prob
- def _calculate_motion_sickness_probability(self):
- """实际计算晕车概率指标"""
- # 获取数据
- df = self.ego_df.copy()
- # 车身坐标系:X轴指向车头,Y轴指向车辆左侧,Z轴指向车顶
- df['posH_rad'] = np.radians(df['posH'])
- # 转换加速度到车身坐标系
- df['a_x_body'] = df['lon_acc_vehicle']
- df['a_y_body'] = df['lat_acc_vehicle']
- # Z方向加速度,如果没有则假设为0
- df['a_z_body'] = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
- # 计算时间差
- df['time_diff'] = df['simTime'].diff().fillna(0)
- # 估计采样频率
- if len(df) > 1:
- time_diff = df['simTime'].diff().median()
- fs = 1.0 / time_diff if time_diff > 0 else 100
- else:
- fs = 100
- # 对各方向加速度应用适当的频率加权
- a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wf', fs)
- a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wf', fs)
- a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wf', fs)
- # 计算加加速度(Jerk)
- df['jerk_x'] = a_x_weighted.diff() / df['time_diff']
- df['jerk_y'] = a_y_weighted.diff() / df['time_diff']
- df['jerk_z'] = a_z_weighted.diff() / df['time_diff']
- # 填充NaN值
- df[['jerk_x', 'jerk_y', 'jerk_z']] = df[['jerk_x', 'jerk_y', 'jerk_z']].fillna(0)
- # 计算Jerk的均方根值(RMS)
- jerk_squared_sum = df['jerk_x'] ** 2 + df['jerk_y'] ** 2 + df['jerk_z'] ** 2
- jerk_rms = np.sqrt(np.mean(jerk_squared_sum))
- # 计算加速度平方和的均值
- accel_squared_sum = a_x_weighted ** 2 + a_y_weighted ** 2 + a_z_weighted ** 2
- accel_squared_mean = np.mean(accel_squared_sum)
- # 设置模型参数
- alpha = 0.1 # 加速度权重(s⁴/m²)
- beta = 0.5 # Jerk权重(s²/m²)
- gamma = 10.0 # 归一化因子(m²/s⁴)
- # 计算晕车概率
- acceleration_term = alpha * accel_squared_mean
- jerk_term = beta * jerk_rms
- score = (acceleration_term + jerk_term) / gamma
- probability = 100 * (1 - np.exp(-score))
- # 限制在0-100%范围内
- probability = np.clip(probability, 0, 100)
- # 记录计算结果
- self.calculated_value['motionSickness'] = probability
- self.logger.info(f"晕车概率(Motion Sickness Probability)计算结果: {probability:.2f}%")
- self.logger.info(f"加速度平方和均值: {accel_squared_mean:.4f} m²/s⁴, Jerk均方根值: {jerk_rms:.4f} m/s³")
- return probability
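- # Worked example (illustrative numbers): with alpha=0.1, beta=0.5, gamma=10.0,
- # accel_squared_mean = 4.0 m²/s⁴ and jerk_rms = 2.0 m/s³ give
- # score = (0.1*4.0 + 0.5*2.0) / 10.0 = 0.14 and probability = 100*(1 - exp(-0.14)) ≈ 13%.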
- def calculate_vdv(self):
- """计算振动剂量值(Vibration Dose Value, VDV)指标并检测高VDV事件"""
- # 实际计算VDV
- vdv_value = self._calculate_vdv()
- # 直接设置阈值
- self._detect_threshold_events(
- vdv_value,
- 'vdv',
- min_threshold=0.0,
- max_threshold=8.0 # 硬编码阈值
- )
- # self.generate_metric_chart('vdv')
- return vdv_value
- def _calculate_vdv(self):
- """实际计算振动剂量值"""
- # 获取数据
- df = self.ego_df.copy()
- # 使用车身坐标系下的加速度数据
- if 'lon_acc_vehicle' in df.columns and 'lat_acc_vehicle' in df.columns and 'acc_z_vehicle' in df.columns:
- # 使用已转换的数据
- a_x_body = df['lon_acc_vehicle']
- a_y_body = df['lat_acc_vehicle']
- a_z_body = df['acc_z_vehicle']
- else:
- self.logger.warning("缺少lon_acc_vehicle等数据,无法进行坐标转换")
- return self.calculated_value['vdv']
- # 计算时间差
- df['time_diff'] = df['simTime'].diff().fillna(0)
- # 估计采样频率
- if len(df) > 1:
- time_diff = df['simTime'].diff().median()
- fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
- else:
- fs = 100 # 默认采样频率
- # 对各方向加速度应用适当的频率加权
- a_x_weighted = self._apply_frequency_weighting(a_x_body, 'Wd', fs) # 水平方向使用Wd
- a_y_weighted = self._apply_frequency_weighting(a_y_body, 'Wd', fs) # 水平方向使用Wd
- a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wk', fs) # 垂直方向使用Wk
- # 计算加权均方根值 (r.m.s.)
- a_x_rms = np.sqrt(np.mean(a_x_weighted ** 2))
- a_y_rms = np.sqrt(np.mean(a_y_weighted ** 2))
- a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
- # 记录r.m.s.值用于参考
- self.logger.info(f"X方向加权均方根值: {a_x_rms}")
- self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
- self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
- # 计算VDV - 对加速度四次方进行时间积分,再开四次方根
- # 对于X方向(前后方向)
- vdv_x = np.power(np.sum(np.power(np.abs(a_x_weighted), 4) * df['time_diff']), 0.25)
- # 对于Y方向(左右方向)
- vdv_y = np.power(np.sum(np.power(np.abs(a_y_weighted), 4) * df['time_diff']), 0.25)
- # 对于Z方向(上下方向)
- vdv_z = np.power(np.sum(np.power(np.abs(a_z_weighted), 4) * df['time_diff']), 0.25)
- # 综合VDV - 可以使用向量和或加权和
- # 根据ISO 2631标准,垂直方向(Z)的权重通常更高
- vdv = np.sqrt(vdv_x ** 2 + vdv_y ** 2 + (1.4 * vdv_z) ** 2)
- # 记录计算结果
- self.calculated_value['vdv'] = vdv
- self.logger.info(f"振动剂量值(VDV)计算结果: {vdv}")
- self.logger.info(f"X方向VDV: {vdv_x}, Y方向VDV: {vdv_y}, Z方向VDV: {vdv_z}")
- # 生成VDV指标图表
- # self.generate_metric_chart('vdv')
- return vdv
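- # Definition recap: per axis, VDV = (∫ a_w(t)^4 dt)^(1/4), discretised above as
- # (Σ |a_w,i|^4 · Δt_i)^(1/4); the axes are then combined with the vertical component
- # weighted by 1.4, mirroring the emphasis ISO 2631 places on Z-axis vibration.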
- def calculate_ava_vav(self):
- """计算多维度综合加权加速度并检测高值事件"""
- # 实际计算AVA/VAV
- ava_vav_value = self._calculate_ava_vav()
- # 检测高值事件
- # 直接设置阈值
- self._detect_threshold_events(
- ava_vav_value,
- 'ava_vav',
- min_threshold=0.0,
- max_threshold=0.63 # 硬编码阈值
- )
- # self.generate_metric_chart('ava_vav')
- return ava_vav_value
- def _calculate_ava_vav(self):
- """实际计算多维度综合加权加速度"""
- # 定义各方向的权重系数
- k_x = 1.0 # X方向加速度权重
- k_y = 1.0 # Y方向加速度权重
- k_z = 1.0 # Z方向加速度权重
- k_roll = 0.63 # 横滚角速度权重
- k_pitch = 0.8 # 俯仰角速度权重
- k_yaw = 0.5 # 偏航角速度权重
- # 获取数据
- df = self.ego_df.copy()
- # 计算时间差
- df['time_diff'] = df['simTime'].diff().fillna(0)
- df['a_x_body'] = df['lon_acc_vehicle']
- df['a_y_body'] = df['lat_acc_vehicle']
- # Z方向加速度,如果没有则假设为0
- df['a_z_body'] = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
- # 角速度数据,如果没有则使用角速度变化率代替
- # 注意:speedH是航向角速度,需要转换为车身坐标系下的偏航角速度
- omega_roll = df['rollRate'] if 'rollRate' in df.columns else pd.Series(np.zeros(len(df)))
- omega_pitch = df['pitchRate'] if 'pitchRate' in df.columns else pd.Series(np.zeros(len(df)))
- omega_yaw = df['speedH'] # 使用航向角速度作为偏航角速度
- # 应用ISO 2631-1:1997标准的频率加权滤波
- # 估计采样频率 - 假设数据是均匀采样的
- if len(df) > 1:
- time_diff = df['simTime'].diff().median()
- fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
- else:
- fs = 100 # 默认采样频率
- # 对各方向加速度应用适当的频率加权
- a_x_weighted = self._apply_frequency_weighting(df['a_x_body'], 'Wd', fs)
- a_y_weighted = self._apply_frequency_weighting(df['a_y_body'], 'Wd', fs)
- a_z_weighted = self._apply_frequency_weighting(df['a_z_body'], 'Wk', fs)
- # 对角速度也应用适当的频率加权
- # 注意:ISO标准没有直接指定角速度的加权,这里使用简化处理
- omega_roll_weighted = omega_roll # 可以根据需要应用适当的滤波
- omega_pitch_weighted = omega_pitch
- omega_yaw_weighted = omega_yaw
- # 计算加权均方根值 (r.m.s.)
- # 对每个方向的加速度/角速度平方后求平均,再开平方根
- a_x_rms = np.sqrt(np.mean(a_x_weighted ** 2))
- a_y_rms = np.sqrt(np.mean(a_y_weighted ** 2))
- a_z_rms = np.sqrt(np.mean(a_z_weighted ** 2))
- omega_roll_rms = np.sqrt(np.mean(omega_roll_weighted ** 2))
- omega_pitch_rms = np.sqrt(np.mean(omega_pitch_weighted ** 2))
- omega_yaw_rms = np.sqrt(np.mean(omega_yaw_weighted ** 2))
- # 计算综合加权加速度
- ava_vav = np.sqrt(
- k_x * a_x_rms ** 2 +
- k_y * a_y_rms ** 2 +
- k_z * a_z_rms ** 2 +
- k_roll * omega_roll_rms ** 2 +
- k_pitch * omega_pitch_rms ** 2 +
- k_yaw * omega_yaw_rms ** 2
- )
- # 记录计算结果
- self.calculated_value['ava_vav'] = ava_vav
- self.logger.info(f"多维度综合加权加速度(ava_vav)计算结果: {ava_vav}")
- return ava_vav
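- # Definition recap: ava_vav = sqrt(k_x·a_x_rms² + k_y·a_y_rms² + k_z·a_z_rms²
- # + k_roll·ω_roll_rms² + k_pitch·ω_pitch_rms² + k_yaw·ω_yaw_rms²), i.e. an ISO 2631-1
- # style combined (total) vibration value extended with weighted angular-rate terms.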
- def calculate_msdv(self):
- """计算晕动剂量值(Motion Sickness Dose Value, MSDV)指标并检测高值事件"""
- # 实际计算MSDV
- msdv_value = self._calculate_msdv()
- # 检测高值事件
- # 直接设置阈值
- self._detect_threshold_events(
- msdv_value,
- 'msdv',
- min_threshold=0.0,
- max_threshold=6.0 # 硬编码阈值
- )
- # self.generate_metric_chart('msdv')
- return msdv_value
- def _calculate_msdv(self):
- """实际计算晕动剂量值"""
- # 获取数据
- df = self.ego_df.copy()
- # 使用车身坐标系下的加速度数据
- if 'lon_acc_vehicle' in df.columns and 'lat_acc_vehicle' in df.columns:
- # 使用已转换的数据
- a_x_body = df['lon_acc_vehicle']
- a_y_body = df['lat_acc_vehicle']
- a_z_body = df['acc_z_vehicle'] if 'acc_z_vehicle' in df.columns else pd.Series(np.zeros(len(df)))
- else:
- self.logger.warning("缺少lon_acc_vehicle和lat_acc_vehicle数据,无法进行坐标转换")
- return self.calculated_value['msdv']
- # 计算时间差
- df['time_diff'] = df['simTime'].diff().fillna(0)
- total_time = df['time_diff'].sum()
- # 估计采样频率
- if len(df) > 1:
- time_diff = df['simTime'].diff().median()
- fs = 1.0 / time_diff if time_diff > 0 else 100 # 默认100Hz
- else:
- fs = 100 # 默认采样频率
- # 对各方向加速度应用适当的频率加权
- # 对于晕动评估,使用Wf加权滤波器
- a_x_weighted = self._apply_frequency_weighting(a_x_body, 'Wf', fs)
- a_y_weighted = self._apply_frequency_weighting(a_y_body, 'Wf', fs)
- a_z_weighted = self._apply_frequency_weighting(a_z_body, 'Wf', fs)
- # 先计算加权均方根值 (r.m.s.)
- a_x_rms = np.sqrt(np.sum(a_x_weighted ** 2 * df['time_diff']) / total_time)
- a_y_rms = np.sqrt(np.sum(a_y_weighted ** 2 * df['time_diff']) / total_time)
- a_z_rms = np.sqrt(np.sum(a_z_weighted ** 2 * df['time_diff']) / total_time)
- # 记录r.m.s.值用于参考
- self.logger.info(f"X方向加权均方根值: {a_x_rms}")
- self.logger.info(f"Y方向加权均方根值: {a_y_rms}")
- self.logger.info(f"Z方向加权均方根值: {a_z_rms}")
- # 计算MSDV - 基于r.m.s.值和总时间
- msdv_x = a_x_rms * np.sqrt(total_time)
- msdv_y = a_y_rms * np.sqrt(total_time)
- msdv_z = a_z_rms * np.sqrt(total_time)
- # 综合MSDV - 可以使用向量和或加权和
- # 根据ISO 2631标准,垂直方向(Z)的权重通常更高
- msdv = np.sqrt(msdv_x ** 2 + msdv_y ** 2 + (1.4 * msdv_z) ** 2)
- # 记录计算结果
- self.calculated_value['msdv'] = msdv
- self.logger.info(f"晕动剂量值(MSDV)计算结果: {msdv}")
- self.logger.info(f"X方向MSDV: {msdv_x}, Y方向MSDV: {msdv_y}, Z方向MSDV: {msdv_z}")
- # 生成MSDV指标图表
- # self.generate_metric_chart('msdv')
- return msdv
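- # Definition recap: per axis, MSDV = a_w_rms · sqrt(T) (equivalent to sqrt(∫ a_w(t)² dt));
- # e.g. a_w_rms = 0.3 m/s² over T = 100 s gives MSDV = 0.3 * 10 = 3.0 m/s^1.5 (illustrative numbers).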
- def calculate_zigzag_count(self):
- """计算蛇行指标并检测事件"""
- # 原有的计算逻辑
- self._zigzag_detector()
- # 检测蛇行事件
- zigzag_events = self._detect_zigzag_events()
- self.generate_metric_chart('zigzag')
- # 返回事件次数
- return len(zigzag_events)
- def _detect_zigzag_events(self):
- """检测蛇行事件"""
- # 获取蛇行时间列表
- if not self.zigzag_time_list:
- return []
- # 创建事件列表
- events = []
- for time_range in self.zigzag_time_list:
- # Entries may be [start, end] pairs (from _peak_valley_judgment) or event dicts (from _zigzag_detector); dicts were already written to discomfort_df, so skip them here
- if isinstance(time_range, dict):
- continue
- start_time, end_time = time_range
- start_frame = get_frame_with_time(self.time_list, self.frame_list, start_time)
- end_frame = get_frame_with_time(self.time_list, self.frame_list, end_time)
- events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'zigzag'
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'zigzag'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- return events
- def calculate_shake_count(self):
- """计算晃动指标并检测事件"""
- # 原有的计算逻辑
- self._shake_detector()
- # 检测晃动事件
- shake_events = self._detect_shake_events()
- self.generate_metric_chart('shake')
- # 返回事件次数
- return len(shake_events)
- def _detect_shake_events(self):
- """检测晃动事件"""
- # 获取晃动事件数据
- if not self.shake_events:
- return []
- # 创建事件列表
- events = []
- for event in self.shake_events:
- events.append({
- 'start_time': event['start_time'],
- 'end_time': event['end_time'],
- 'start_frame': event['start_frame'],
- 'end_frame': event['end_frame'],
- 'type': 'shake'
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': event['start_time'],
- 'end_time': event['end_time'],
- 'start_frame': event['start_frame'],
- 'end_frame': event['end_frame'],
- 'type': 'shake'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- return events
- def calculate_cadence_count(self):
- """计算顿挫指标并检测事件"""
- # 原有的计算逻辑
- cadence_events = self._cadence_detector()
- self.generate_metric_chart('cadence')
- # 返回事件次数
- return len(cadence_events)
- def _cadence_detector(self):
- """检测顿挫事件"""
- # 原有的检测逻辑
- df = self.ego_df.copy()
- # 检查必要字段是否存在
- required_fields = ['simTime', 'simFrame', 'cadence']
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
- if not all(field in df.columns for field in required_fields + [acc_field]):
- missing_fields = [field for field in required_fields + [acc_field] if field not in df.columns]
- self.logger.warning(f"顿挫检测缺少必要字段: {missing_fields},无法进行检测")
- self.cadence_count = 0
- return []
- # 提取必要字段
- df = df[['simTime', 'simFrame', acc_field, 'cadence']].copy()
- # 重命名列以保持代码一致性
- df.rename(columns={acc_field: 'acc_used'}, inplace=True)
- # 滤除无效cadence值
- df = df[df['cadence'].notna()].copy()
- df['cadence_diff'] = df['cadence'].diff()
- df.dropna(subset=['cadence_diff'], inplace=True)
- df = df[df['cadence_diff'] != 0]
- if df.empty:
- self.logger.info("未检测到明显cadence变化,未触发顿挫事件")
- self.cadence_count = 0
- return []
- # 提取突变点信息
- time_list = df['simTime'].tolist()
- frame_list = df['simFrame'].tolist()
- # 聚类突变点:按时间差小于 TIME_RANGE 分组
- TIME_RANGE = 1.0 # 秒
- grouped_times, grouped_frames = [], []
- temp_times, temp_frames = [], []
- for i in range(len(time_list)):
- if not temp_times or (time_list[i] - temp_times[-1] <= TIME_RANGE):
- temp_times.append(time_list[i])
- temp_frames.append(frame_list[i])
- else:
- if len(temp_times) >= 1:
- grouped_times.append(temp_times)
- grouped_frames.append(temp_frames)
- temp_times, temp_frames = [time_list[i]], [frame_list[i]]
- if len(temp_times) >= 1:
- grouped_times.append(temp_times)
- grouped_frames.append(temp_frames)
- # 只保留有效顿挫组
- cadence_time_ranges = [[g[0], g[-1]] for g in grouped_times]
- cadence_frame_ranges = [[g[0], g[-1]] for g in grouped_frames]
- # 输出结果到 discomfort_df
- for i in range(len(cadence_time_ranges)):
- start_time = cadence_time_ranges[i][0]
- end_time = cadence_time_ranges[i][1]
- start_frame = cadence_frame_ranges[i][0]
- end_frame = cadence_frame_ranges[i][1]
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'cadence'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 计算强度与频率(用于量化)
- stre_list, freq_list = [], []
- for group_times in grouped_times:
- g_df = df[df['simTime'].isin(group_times)]
- strength = g_df['acc_used'].abs().mean()
- stre_list.append(strength)
- if len(group_times) > 1:
- t_delta = group_times[-1] - group_times[0]
- freq = len(group_times) / t_delta if t_delta > 0 else 0
- freq_list.append(freq)
- # 存储检测统计
- self.cadence_count = len(cadence_time_ranges)
- cadence_strength = np.mean(stre_list) if stre_list else 0
- self.logger.info(f"检测到 {self.cadence_count} 次顿挫事件,平均强度:{cadence_strength:.2f}")
- # 记录使用的加速度字段
- self.logger.info(f"顿挫检测使用的加速度字段: {acc_field}")
- return cadence_time_ranges
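- # Clustering example (illustrative): cadence change points at t = [12.0, 12.4, 12.9, 15.2, 15.8] s
- # with TIME_RANGE = 1.0 s form the groups [12.0, 12.9] and [15.2, 15.8], i.e. two cadence events.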
- def calculate_slam_brake_count(self):
- """计算急刹车指标并检测事件"""
- # 原有的计算逻辑
- self._slam_brake_detector()
- # 返回事件次数
- # 生成急刹车指标图表
- self.generate_metric_chart('slamBrake')
- return self.slam_brake_count
- def _slam_brake_detector(self):
- """检测急刹车事件"""
- # 原有的检测逻辑
- df = self.ego_df.copy()
- # 检查是否有必要的列
- if 'slam_brake' not in df.columns:
- self.logger.warning("缺少计算急刹车指标所需的数据列")
- return
- # 设置急刹车检测参数
- min_duration = 0.5 # 最小持续时间 秒
- # 检测连续的急刹车事件
- slam_brake_events = []
- in_event = False
- start_idx = 0
- for i, row in df.iterrows():
- if row['slam_brake'] == 1 and not in_event:
- # 开始新的急刹车事件
- in_event = True
- start_idx = i
- elif row['slam_brake'] == 0 and in_event:
- # 结束当前急刹车事件
- in_event = False
- end_idx = i - 1
- # 计算事件持续时间
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- # 如果持续时间超过阈值,记录为有效急刹车事件
- if duration >= min_duration:
- # 确定使用的加速度字段
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
- slam_brake_events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'min_lon_acc': df.loc[start_idx:end_idx, acc_field].min()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': 'slam_brake'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 如果最后一个事件没有结束
- if in_event:
- end_idx = len(df) - 1
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- if duration >= min_duration:
- # 确定使用的加速度字段
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
- slam_brake_events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'min_lon_acc': df.loc[start_idx:end_idx, acc_field].min()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': 'slam_brake'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 更新急刹车计数
- self.slam_brake_count = len(slam_brake_events)
- self.logger.info(f"检测到 {self.slam_brake_count} 次急刹车事件")
- def calculate_slam_accel_count(self):
- """计算急加速指标并检测事件"""
- # 原有的计算逻辑
- self._slam_accel_detector()
- self.generate_metric_chart('slamAccelerate')
- # 返回事件次数
- return self.slam_accel_count
- def _slam_accel_detector(self):
- """检测急加速事件"""
- # 原有的检测逻辑
- df = self.ego_df.copy()
- # 检查是否有必要的列
- if 'slam_accel' not in df.columns:
- self.logger.warning("缺少计算急加速指标所需的数据列")
- return
- # 设置急加速检测参数
- min_duration = 0.5 # 最小持续时间 秒
- # 检测连续的急加速事件
- slam_accel_events = []
- in_event = False
- start_idx = 0
- for i, row in df.iterrows():
- if row['slam_accel'] == 1 and not in_event:
- # 开始新的急加速事件
- in_event = True
- start_idx = i
- elif row['slam_accel'] == 0 and in_event:
- # 结束当前急加速事件
- in_event = False
- end_idx = i - 1
- # 计算事件持续时间
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- # 如果持续时间超过阈值,记录为有效急加速事件
- if duration >= min_duration:
- # 确定使用的加速度字段
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
- slam_accel_events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'max_lon_acc': df.loc[start_idx:end_idx, acc_field].max()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': 'slam_accel'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 如果最后一个事件没有结束
- if in_event:
- end_idx = len(df) - 1
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- if duration >= min_duration:
- # 确定使用的加速度字段
- acc_field = 'lon_acc_vehicle' if 'lon_acc_vehicle' in df.columns else 'lon_acc'
- slam_accel_events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'max_lon_acc': df.loc[start_idx:end_idx, acc_field].max()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': 'slam_accel'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 更新急加速计数
- self.slam_accel_count = len(slam_accel_events)
- self.logger.info(f"检测到 {self.slam_accel_count} 次急加速事件")
- # ========== 事件检测方法 ==========
- # def _detect_low_comfort_events(self, value, event_type, threshold):
- # """检测低舒适度事件"""
- # if value < threshold:
- # start_time = self.ego_df['simTime'].min()
- # end_time = self.ego_df['simTime'].max()
- # start_frame = self.ego_df['simFrame'].min()
- # end_frame = self.ego_df['simFrame'].max()
- # new_row = pd.DataFrame([{
- # 'start_time': start_time,
- # 'end_time': end_time,
- # 'start_frame': start_frame,
- # 'end_frame': end_frame,
- # 'type': event_type
- # }])
- # self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # self.logger.info(f"检测到{event_type}低值事件: {value:.2f} < {threshold}")
- # def _detect_high_probability_events(self, value, event_type, threshold):
- # """检测高概率事件"""
- # if value > threshold:
- # start_time = self.ego_df['simTime'].min()
- # end_time = self.ego_df['simTime'].max()
- # start_frame = self.ego_df['simFrame'].min()
- # end_frame = self.ego_df['simFrame'].max()
- # new_row = pd.DataFrame([{
- # 'start_time': start_time,
- # 'end_time': end_time,
- # 'start_frame': start_frame,
- # 'end_frame': end_frame,
- # 'type': event_type
- # }])
- # self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # self.logger.info(f"检测到{event_type}高值事件: {value:.2f} > {threshold}")
- def _detect_threshold_events(self, value, event_type, min_threshold=None, max_threshold=None):
- """
- 检测阈值事件 - 统一处理低值和高值事件
- 参数:
- value: 指标计算值
- event_type: 事件类型名称
- min_threshold: 最小值阈值(低于此值触发事件)
- max_threshold: 最大值阈值(高于此值触发事件)
- """
- trigger = False
- reason = ""
- # 检查是否低于最小值阈值
- if min_threshold is not None and value < min_threshold:
- trigger = True
- reason = f"{value:.2f} < {min_threshold:.2f} (min threshold)"
- # 检查是否高于最大值阈值
- if max_threshold is not None and value > max_threshold:
- trigger = True
- reason = f"{value:.2f} > {max_threshold:.2f} (max threshold)"
- # 如果触发事件,记录到不舒适事件表
- if trigger:
- start_time = self.ego_df['simTime'].min()
- end_time = self.ego_df['simTime'].max()
- start_frame = self.ego_df['simFrame'].min()
- end_frame = self.ego_df['simFrame'].max()
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': event_type
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- self.logger.info(f"检测到{event_type}事件: {reason}")
- def _detect_high_vdv_events(self, value, threshold):
- """检测高VDV事件"""
- if value > threshold:
- start_time = self.ego_df['simTime'].min()
- end_time = self.ego_df['simTime'].max()
- start_frame = self.ego_df['simFrame'].min()
- end_frame = self.ego_df['simFrame'].max()
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'vdv'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- self.logger.info(f"检测到高VDV事件: {value:.4f} > {threshold}")
- def _detect_high_ava_vav_events(self, value, threshold):
- """检测高AVA/VAV事件"""
- if value > threshold:
- start_time = self.ego_df['simTime'].min()
- end_time = self.ego_df['simTime'].max()
- start_frame = self.ego_df['simFrame'].min()
- end_frame = self.ego_df['simFrame'].max()
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'ava_vav'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- self.logger.info(f"检测到高AVA/VAV事件: {value:.4f} > {threshold}")
- def _detect_high_msdv_events(self, value, threshold):
- """检测高MSDV事件"""
- if value > threshold:
- start_time = self.ego_df['simTime'].min()
- end_time = self.ego_df['simTime'].max()
- start_frame = self.ego_df['simFrame'].min()
- end_frame = self.ego_df['simFrame'].max()
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'msdv'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- self.logger.info(f"检测到高MSDV事件: {value:.4f} > {threshold}")
- def _detect_comfort_events_from_time_series(self, series, event_type, threshold, min_duration=0.5):
- """
- 从时间序列中检测舒适性事件
- 参数:
- series: 时间序列数据 (如加速度、舒适度指标等)
- event_type: 事件类型名称
- threshold: 事件检测阈值
- min_duration: 最小事件持续时间(秒)
- """
- # 标记超过阈值的点
- df = self.ego_df.copy()
- df['exceed'] = (series > threshold).astype(int)
- # 检测连续事件
- events = []
- in_event = False
- start_idx = 0
- for i, row in df.iterrows():
- if row['exceed'] == 1 and not in_event:
- # 开始新事件
- in_event = True
- start_idx = i
- elif row['exceed'] == 0 and in_event:
- # 结束当前事件
- in_event = False
- end_idx = i - 1
- # 计算事件持续时间
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- # 如果持续时间超过阈值,记录为有效事件
- if duration >= min_duration:
- events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'max_value': series.loc[start_idx:end_idx].max()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': event_type
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 如果最后一个事件没有结束
- if in_event:
- end_idx = len(df) - 1
- start_time = df.loc[start_idx, 'simTime']
- end_time = df.loc[end_idx, 'simTime']
- duration = end_time - start_time
- if duration >= min_duration:
- events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'duration': duration,
- 'max_value': series.loc[start_idx:end_idx].max()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': df.loc[start_idx, 'simFrame'],
- 'end_frame': df.loc[end_idx, 'simFrame'],
- 'type': event_type
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- return events
- # ========== 辅助方法 ==========
- def _calculate_psd(self, signal, fs):
- """计算信号的功率谱密度
- Args:
- signal: 输入信号
- fs: 采样频率
- Returns:
- tuple: 频率和对应的功率谱密度
- """
- # 使用Welch方法计算PSD
- from scipy import signal as sp_signal
- f, psd = sp_signal.welch(signal, fs, nperseg=min(256, len(signal) // 2))
- return f, psd
- def _cal_cur_ego_path(self, row):
- """计算车辆轨迹曲率"""
- try:
- divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
- if not divide:
- res = None
- else:
- res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
- except:
- res = None
- return res
- def _peak_valley_determination(self, df):
- """确定角速度的峰谷"""
- peaks, _ = scipy.signal.find_peaks(
- df['speedH'], height=2.3, distance=3,
- prominence=2.3, width=1)
- valleys, _ = scipy.signal.find_peaks(
- -df['speedH'], height=2.3, distance=3,
- prominence=2.3, width=1)
- return sorted(list(peaks) + list(valleys))
- def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=4.6):
- """判断峰谷是否满足蛇行条件"""
- t_diff = p_curr[0] - p_last[0]
- v_diff = abs(p_curr[1] - p_last[1])
- s = p_curr[1] * p_last[1]
- if t_diff < tw and v_diff > avg and s < 0:
- if [p_last[0], p_curr[0]] not in self.zigzag_time_list:
- self.zigzag_time_list.append([p_last[0], p_curr[0]])
- return True
- return False
- def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
- """处理顿挫数据
- 使用车辆坐标系下的纵向加速度判断顿挫
- Args:
- lon_acc: 纵向加速度(车辆坐标系)
- ip_acc: 加速阈值
- ip_dec: 减速阈值
- Returns:
- int/float: nan表示不符合顿挫条件,1表示加速顿挫,-1表示减速顿挫,0表示正常
- """
- if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
- return np.nan
- elif abs(lon_acc) == 0:
- return 0
- elif lon_acc > 0 and lon_acc < ip_acc:
- return 1
- elif lon_acc < 0 and lon_acc > ip_dec:
- return -1
- else:
- return 0
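- # Classification example (illustrative): with ip_acc = 3.0 and ip_dec = -4.0 m/s²,
- # lon_acc = 2.0 -> 1 (acceleration-side sample), lon_acc = -2.5 -> -1 (deceleration-side sample),
- # lon_acc = 0.5 -> nan (magnitude below 1 m/s²), lon_acc = 3.5 -> nan (beyond the acceleration threshold).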
- @peak_valley_decorator
- def _zigzag_detector(self, p_curr, p_last, flag=True):
- """检测蛇行事件"""
- if flag:
- # 记录蛇行事件的起止时间和帧号
- start_time = p_last[0]
- end_time = p_curr[0]
- start_frame = get_frame_with_time(self.time_list, self.frame_list, start_time)
- end_frame = get_frame_with_time(self.time_list, self.frame_list, end_time)
- # 计算事件持续时间
- duration = end_time - start_time
- # 设置最小持续时间阈值
- min_duration = 0.5 # 秒
- if duration >= min_duration:
- # 更新蛇行计数
- self.zigzag_count += 1
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'type': 'zigzag'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 记录事件信息到zigzag_time_list
- self.zigzag_time_list.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': start_frame,
- 'end_frame': end_frame,
- 'duration': duration
- })
- else:
- self.zigzag_count += 0
- @peak_valley_decorator
- def _cal_zigzag_strength(self, p_curr, p_last, flag=True):
- """计算蛇行强度"""
- if flag:
- v_diff = abs(p_curr[1] - p_last[1])
- t_diff = p_curr[0] - p_last[0]
- if t_diff > 0:
- self.zigzag_stre_list.append(v_diff / t_diff) # 平均角加速度
- else:
- self.zigzag_stre_list = []
- def _get_zigzag_times(self):
- """获取所有蛇行时间点"""
- all_times = []
- for time_range in self.zigzag_time_list:
- start, end = time_range
- # 获取这个时间范围内的所有时间点
- times_in_range = self.ego_df[(self.ego_df['simTime'] >= start) &
- (self.ego_df['simTime'] <= end)]['simTime'].tolist()
- all_times.extend(times_in_range)
- return all_times
- def _shake_detector(self, T_diff=0.5):
- """检测晃动事件 - 改进版本(使用向量化操作)"""
- # 获取数据
- df = self.ego_df.copy()
- # 检查是否有必要的列
- if 'lat_acc' not in df.columns or 'posH' not in df.columns:
- self.logger.warning("缺少计算晃动指标所需的数据列")
- return []
- # 将东北天坐标系下的数据转换为车身坐标系
- # 车身坐标系:X轴指向车头,Y轴指向车辆左侧,Z轴指向车顶
- df['posH_rad'] = np.radians(df['posH'])
- # 转换横向加速度到车身坐标系
- df['lat_acc_body'] = df['lat_acc'] * np.cos(df['posH_rad']) - df['lon_acc'] * np.sin(df['posH_rad'])
- # 转换横摆角速度到车身坐标系
- # speedH已经是车身坐标系下的横摆角速度,不需要转换
- df['speedH_body'] = df['speedH']
- # 1. 计算横向加速度变化率(使用车身坐标系下的横向加速度)
- df['lat_acc_rate'] = df['lat_acc_body'].diff() / df['simTime'].diff()
- # 2. 计算横摆角速度变化率(使用车身坐标系下的横摆角速度)
- df['speedH_rate'] = df['speedH_body'].diff() / df['simTime'].diff()
- # 3. 计算横摆角速度的短期变化特性
- window_size = 10 # 10帧窗口
- df['speedH_std'] = df['speedH'].rolling(window=window_size, min_periods=2).std()
- # 4. 基于车速的动态阈值
- v0 = 20 * 5 / 18 # ≈5.56 m/s
- k = 0.008 * 3.6 # =0.0288 per m/s
- df['lat_acc_threshold'] = df['v'].apply(
- lambda speed: max(
- 1.0, # 下限 1.0 m/s²
- min(
- 1.8, # 上限 1.8 m/s²
- 1.8 - k * (speed - v0) # 线性递减
- )
- )
- )
- df['speedH_threshold'] = df['v'].apply(
- lambda speed: max(1.5, min(3.0, 2.0 * (1 + (speed - 20) / 60)))
- )
- # 5. 综合判断晃动条件
- # 条件A: 横向加速度超过阈值
- condition_A = df['lat_acc'].abs() > df['lat_acc_threshold']
- # 条件B: 横向加速度变化率超过阈值
- lat_acc_rate_threshold = 0.5 # 横向加速度变化率阈值 (m/s³)
- condition_B = df['lat_acc_rate'].abs() > lat_acc_rate_threshold
- # 条件C: 横摆角速度有明显变化但不呈现周期性
- condition_C = (df['speedH_std'] > df['speedH_threshold'])
- # 综合条件: 满足条件A,且满足条件B或条件C
- shake_condition = condition_A & (condition_B | condition_C)
- # 6. 使用向量化操作检测连续事件
- event_groups = (shake_condition != shake_condition.shift()).cumsum()
- shake_events = []
- for _, group in df[shake_condition].groupby(event_groups):
- if len(group) >= 2: # 至少2帧才算一次晃动
- start_time = group['simTime'].iloc[0]
- end_time = group['simTime'].iloc[-1]
- duration = end_time - start_time
- if duration >= T_diff: # 只记录持续时间超过阈值的事件
- shake_events.append({
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': group['simFrame'].iloc[0],
- 'end_frame': group['simFrame'].iloc[-1],
- 'duration': duration,
- 'max_lat_acc': group['lat_acc'].abs().max()
- })
- # 添加到不舒适事件表
- new_row = pd.DataFrame([{
- 'start_time': start_time,
- 'end_time': end_time,
- 'start_frame': group['simFrame'].iloc[0],
- 'end_frame': group['simFrame'].iloc[-1],
- 'type': 'shake'
- }])
- self.discomfort_df = pd.concat([self.discomfort_df, new_row], ignore_index=True)
- # 更新晃动计数
- self.shake_count = len(shake_events)
- self.logger.info(f"检测到 {self.shake_count} 次晃动事件")
- # 更新ego_df中的相关列
- self.ego_df = df.copy()
- # 保存晃动事件数据
- self.shake_events = shake_events
- return shake_events
- class ComfortManager:
- """舒适性指标计算主类"""
- def __init__(self, data_processed):
- self.data = data_processed
- self.logger = LogManager().get_logger()
- self.registry = ComfortRegistry(self.data)
- def report_statistic(self):
- """生成舒适性评分报告"""
- comfort_result = self.registry.batch_execute()
- return comfort_result
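- # Minimal usage sketch (illustrative; assumes a preprocessed object exposing ego_data and
- # comfort_config["comfort"] in the shape consumed above -- the stub below is hypothetical,
- # not part of the project API):
- #
- #     class _StubProcessed:
- #         def __init__(self, ego_df, config):
- #             self.ego_data = ego_df
- #             self.comfort_config = {"comfort": config}
- #
- #     manager = ComfortManager(_StubProcessed(ego_df, config))
- #     result = manager.report_statistic()   # e.g. {"zigzag": 1.0, "shake": 0.0, ...}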