@@ -1,29 +1,13 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-##################################################################
-#
-# Copyright (c) 2023 CICV, Inc. All Rights Reserved
-#
-##################################################################
-"""
-@Authors: zhanghaiwen(zhanghaiwen@china-icv.cn), yangzihao(yangzihao@china-icv.cn)
-@Data: 2023/06/25
-@Last Modified: 2023/06/25
-@Summary: Comfort metrics
-"""
-
-import sys
+
 import math
 import pandas as pd
 import numpy as np
 import scipy.signal
-from pathlib import Path
-
 from modules.lib.score import Score
 from modules.lib.common import get_interpolation, get_frame_with_time
 from modules.config import config
-from modules.lib import data_process
-
 from modules.lib.log_manager import LogManager
 
 
@@ -39,7 +23,6 @@ def peak_valley_decorator(method):
             p_curr = pv_list[i]
 
             if self._peak_valley_judgment(p_last, p_curr):
-                # method(self, p_curr, p_last)
                 method(self, p_curr, p_last, flag, *args, **kwargs)
             else:
                 p_last = p_curr
@@ -64,60 +47,32 @@ class Comfort(object):
     """
 
     def __init__(self, data_processed):
-
-        # self.logger = log.get_logger()
-        self.eval_data = pd.DataFrame()
         self.data_processed = data_processed
-        self.logger = LogManager().get_logger() # 获取全局日志实例
+        self.logger = LogManager().get_logger()
 
-        self.data = data_processed.ego_data
-        # self.mileage = data_processed.report_info['mileage']
+        self.data = data_processed.ego_data.copy()
         self.ego_df = pd.DataFrame()
         self.discomfort_df = pd.DataFrame(columns=['start_time', 'end_time', 'start_frame', 'end_frame', 'type'])
 
-
         self.calculated_value = {
-            'Weaving': 0,
+            'weaving': 0,
             'shake': 0,
             'cadence': 0,
             'slamBrake': 0,
            'slamAccelerate': 0,
         }
 
-
-
-        # self.time_list = data_processed.driver_ctrl_data['time_list']
-        # self.frame_list = data_processed.driver_ctrl_data['frame_list']
-
         self.time_list = self.data['simTime'].values.tolist()
         self.frame_list = self.data['simFrame'].values.tolist()
 
-        self.count_dict = {}
-        self.duration_dict = {}
-        self.strength_dict = {}
-
-        self.discomfort_count = 0
+        # 移除未使用的字典
         self.zigzag_count = 0
         self.shake_count = 0
         self.cadence_count = 0
         self.slam_brake_count = 0
         self.slam_accel_count = 0
 
-        self.zigzag_strength = 0
-        self.shake_strength = 0
-        self.cadence_strength = 0
-        self.slam_brake_strength = 0
-        self.slam_accel_strength = 0
-
-        self.discomfort_duration = 0
-        self.zigzag_duration = 0
-        self.shake_duration = 0
-        self.cadence_duration = 0
-        self.slam_brake_duration = 0
-        self.slam_accel_duration = 0
-
         self.zigzag_time_list = []
-        self.zigzag_frame_list = []
         self.zigzag_stre_list = []
         self.cur_ego_path_list = []
         self.curvature_list = []
@@ -126,25 +81,12 @@ class Comfort(object):
         self._comf_param_cal()
 
     def _get_data(self):
-        """
-
-        """
+        """获取舒适性评估所需数据"""
         self.ego_df = self.data[config.COMFORT_INFO].copy()
-        self.df = self.ego_df.reset_index(drop=True) # 索引是csv原索引
-
-        # def _cal_cur_ego_path(self, row):
-        #     try:
-        #         divide = (row['speedX'] ** 2 + row['speedY'] ** 2) ** (3 / 2)
-        #         if not divide:
-        #             res = None
-        #         else:
-        #             res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
-        #     except:
-        #         res = None
-        #     return res
-    import numpy as np
+        self.df = self.ego_df.reset_index(drop=True)
 
     def _cal_cur_ego_path(self, row):
+        """计算车辆轨迹曲率"""
         try:
             # 计算速度平方和,判断是否接近零
             speed_sq = row['speedX']**2 + row['speedY']**2
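`_cal_cur_ego_path` evaluates the planar curvature k = (x'*y'' - y'*x'') / (x'^2 + y'^2)^(3/2) from the ego velocity (speedX, speedY) and acceleration (accelX, accelY) channels, with 1e5 as a fallback near zero speed. A quick numerical sanity check of that formula on uniform circular motion, where the magnitude must come out as 1/R (purely illustrative, not part of the module):

    import math

    def curvature(vx, vy, ax, ay):
        # Same expression as _cal_cur_ego_path, without the zero-speed guard.
        return (vx * ay - vy * ax) / (vx ** 2 + vy ** 2) ** 1.5

    # Uniform circular motion: radius R = 50 m, speed v = 10 m/s, heading angle theta.
    R, v, theta = 50.0, 10.0, 0.7
    vx, vy = -v * math.sin(theta), v * math.cos(theta)
    ax, ay = -(v ** 2 / R) * math.cos(theta), -(v ** 2 / R) * math.sin(theta)
    print(curvature(vx, vy, ax, ay), 1 / R)  # both ~0.02; the sign encodes turn direction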
@@ -153,16 +95,12 @@ class Comfort(object):
             divide = speed_sq ** (3/2)
             res = (row['speedX'] * row['accelY'] - row['speedY'] * row['accelX']) / divide
             return res
-        except Exception as e:
+        except Exception:
             return 1e5 # 异常时也返回极大值(如除零、缺失值等)
 
-
     def _comf_param_cal(self):
-        """
-
-        """
-
-        # [log]
+        """计算舒适性相关参数"""
+        # 加减速阈值计算
         self.ego_df['ip_acc'] = self.ego_df['v'].apply(get_interpolation, point1=[18, 4], point2=[72, 2])
         self.ego_df['ip_dec'] = self.ego_df['v'].apply(get_interpolation, point1=[18, -5], point2=[72, -3.5])
         self.ego_df['slam_brake'] = (self.ego_df['lon_acc'] - self.ego_df['ip_dec']).apply(
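For context on the `ip_acc`/`ip_dec` columns: `get_interpolation` is a project helper not shown in this diff. Assuming it does a straight linear interpolation between the two `[speed, threshold]` points it is given (the helper's exact behaviour and the units, presumably km/h and m/s^2, are assumptions), the speed-dependent limits behave roughly like this sketch:

    def linear_threshold(speed, point1, point2):
        # Assumed behaviour of the project's get_interpolation helper:
        # linear interpolation between two [speed, threshold] points.
        (x1, y1), (x2, y2) = point1, point2
        return y1 + (y2 - y1) * (speed - x1) / (x2 - x1)

    # Hard-acceleration limit shrinks from 4 m/s^2 at 18 km/h to 2 m/s^2 at 72 km/h.
    print(linear_threshold(18, [18, 4], [72, 2]))   # 4.0
    print(linear_threshold(45, [18, 4], [72, 2]))   # 3.0
    print(linear_threshold(72, [18, 4], [72, 2]))   # 2.0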
@@ -172,7 +110,7 @@
         self.ego_df['cadence'] = self.ego_df.apply(
             lambda row: self._cadence_process_new(row['lon_acc'], row['ip_acc'], row['ip_dec']), axis=1)
 
-        # for shake detector
+        # 晃动检测相关参数
         self.ego_df['cur_ego_path'] = self.ego_df.apply(self._cal_cur_ego_path, axis=1)
         self.ego_df['curvHor'] = self.ego_df['curvHor'].astype('float')
         self.ego_df['cur_diff'] = (self.ego_df['cur_ego_path'] - self.ego_df['curvHor']).abs()
@@ -185,33 +123,17 @@
 
     def _peak_valley_determination(self, df):
         """
-        Determine the peak and valley of the vehicle based on its current angular velocity.
-
-        Parameters:
-        df: Dataframe containing the vehicle angular velocity.
-
-        Returns:
-        peak_valley: List of indices representing peaks and valleys.
+        确定车辆角速度的峰值和谷值
         """
-
-        peaks, _ = scipy.signal.find_peaks(df['speedH'], height=0.01, distance=1, prominence=0.01)
-        valleys, _ = scipy.signal.find_peaks(-df['speedH'], height=0.01, distance=1, prominence=0.01)
+        # 调整参数以减少噪音干扰
+        peaks, _ = scipy.signal.find_peaks(df['speedH'], height=0.03, distance=3, prominence=0.03, width=1)
+        valleys, _ = scipy.signal.find_peaks(-df['speedH'], height=0.03, distance=3, prominence=0.03, width=1)
         peak_valley = sorted(list(peaks) + list(valleys))
-
         return peak_valley
 
-    def _peak_valley_judgment(self, p_last, p_curr, tw=10000, avg=0.02):
+    def _peak_valley_judgment(self, p_last, p_curr, tw=100, avg=0.06):
         """
-        Determine if the given peaks and valleys satisfy certain conditions.
-
-        Parameters:
-        p_last: Previous peak or valley data point.
-        p_curr: Current peak or valley data point.
-        tw: Threshold time difference between peaks and valleys.
-        avg: Angular velocity gap threshold.
-
-        Returns:
-        Boolean indicating whether the conditions are satisfied.
+        判断给定的峰值和谷值是否满足特定条件
         """
         t_diff = p_curr[0] - p_last[0]
         v_diff = abs(p_curr[1] - p_last[1])
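The tightened `find_peaks` settings above (height and prominence raised from 0.01 to 0.03, minimum peak spacing from 1 to 3 samples, plus a width requirement) are easiest to sanity-check on a synthetic yaw-rate trace; a minimal sketch, independent of the project's data pipeline:

    import numpy as np
    import scipy.signal

    # Synthetic yaw-rate signal: a slow weave plus small measurement noise.
    t = np.arange(0, 10, 0.01)                      # 100 Hz, 10 s
    speed_h = 0.08 * np.sin(2 * np.pi * 0.5 * t)    # genuine oscillation
    speed_h += 0.01 * np.random.default_rng(0).standard_normal(t.size)  # noise

    # Old settings: low thresholds also pick up many noise-induced local maxima.
    old_peaks, _ = scipy.signal.find_peaks(speed_h, height=0.01, distance=1, prominence=0.01)

    # New settings: higher height/prominence plus spacing and width constraints
    # suppress most of the spurious peaks and keep the real weave extrema.
    new_peaks, _ = scipy.signal.find_peaks(speed_h, height=0.03, distance=3, prominence=0.03, width=1)

    print(len(old_peaks), len(new_peaks))  # noticeably fewer peaks survive the new thresholds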
@@ -224,29 +146,13 @@
 
     @peak_valley_decorator
     def zigzag_count_func(self, p_curr, p_last, flag=True):
-        """
-        Count the number of zigzag movements.
-
-        Parameters:
-        df: Input dataframe data.
-
-        Returns:
-        zigzag_count: Number of zigzag movements.
-        """
+        """计算曲折行驶次数"""
         if flag:
             self.zigzag_count += 1
-        else:
-            self.zigzag_count += 0
 
     @peak_valley_decorator
     def cal_zigzag_strength_strength(self, p_curr, p_last, flag=True):
-        """
-        Calculate various strength statistics.
-
-        Returns:
-        Tuple containing maximum strength, minimum strength,
-        average strength, and 99th percentile strength.
-        """
+        """计算曲折行驶强度"""
         if flag:
             v_diff = abs(p_curr[1] - p_last[1])
             t_diff = p_curr[0] - p_last[0]
@@ -256,23 +162,14 @@
             self.zigzag_stre_list = []
 
     def _shake_detector(self, Cr_diff=0.05, T_diff=0.39):
-        """
-        ego车横向加速度ax;
-        ego车轨迹横向曲率;
-        ego车轨迹曲率变化率;
-        ego车所在车lane曲率;
-        ego车所在车lane曲率变化率;
-        转向灯(暂时存疑,可不用)Cr_diff = 0.1, T_diff = 0.04
-        求解曲率公式k(t) = (x'(t) * y''(t) - y'(t) * x''(t)) / ((x'(t))^2 + (y'(t))^2)^(3/2)
-        """
+        """检测晃动事件"""
         time_list = []
         frame_list = []
-        shake_time_list = []
 
         df = self.ego_df.copy()
         df = df[df['cur_diff'] > Cr_diff]
         df['frame_ID_diff'] = df['simFrame'].diff() # 找出行车轨迹曲率与道路曲率之差大于阈值的数据段
-        filtered_df = df[df.frame_ID_diff > T_diff] # 此处是用大间隔区分多次晃动情景 。
+        filtered_df = df[df.frame_ID_diff > T_diff] # 此处是用大间隔区分多次晃动情景
 
         row_numbers = filtered_df.index.tolist()
         cut_column = pd.cut(df.index, bins=row_numbers)
@@ -287,8 +184,6 @@
             df_group['curvHor'] = df_group['curvHor'].abs()
             df_group_straight = df_group[(df_group.lightMask == 0) & (df_group.curvHor < 0.001)]
             if not df_group_straight.empty:
-                tmp_list = df_group_straight['simTime'].values
-                # shake_time_list.append([tmp_list[0], tmp_list[-1]])
                 time_list.extend(df_group_straight['simTime'].values)
                 frame_list.extend(df_group_straight['simFrame'].values)
                 self.shake_count = self.shake_count + 1
@@ -297,8 +192,6 @@
             df_group_change_lane = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'] < 0.001)]
             df_group_change_lane_data = df_group_change_lane[df_group_change_lane.cur_diff > Cr_diff + 0.2]
             if not df_group_change_lane_data.empty:
-                tmp_list = df_group_change_lane_data['simTime'].values
-                # shake_time_list.append([tmp_list[0], tmp_list[-1]])
                 time_list.extend(df_group_change_lane_data['simTime'].values)
                 frame_list.extend(df_group_change_lane_data['simFrame'].values)
                 self.shake_count = self.shake_count + 1
@@ -307,8 +200,6 @@
             df_group_turn = df_group[(df_group['lightMask'] != 0) & (df_group['curvHor'].abs() > 0.001)]
             df_group_turn_data = df_group_turn[df_group_turn.cur_diff.abs() > Cr_diff + 0.1]
             if not df_group_turn_data.empty:
-                tmp_list = df_group_turn_data['simTime'].values
-                # shake_time_list.append([tmp_list[0], tmp_list[-1]])
                 time_list.extend(df_group_turn_data['simTime'].values)
                 frame_list.extend(df_group_turn_data['simFrame'].values)
                 self.shake_count = self.shake_count + 1
@@ -330,13 +221,13 @@
                 sub_group_time = [t_list[i]]
                 sub_group_frame = [f_list[i]]
 
-
         # 输出图表值
         shake_time = [[g[0], g[-1]] for g in group_time]
         shake_frame = [[g[0], g[-1]] for g in group_frame]
         self.shake_count = len(shake_time)
 
         if shake_time:
+            # 保存晃动事件摘要
             time_df = pd.DataFrame(shake_time, columns=['start_time', 'end_time'])
             frame_df = pd.DataFrame(shake_frame, columns=['start_frame', 'end_frame'])
             discomfort_df = pd.concat([time_df, frame_df], axis=1)
@@ -345,21 +236,10 @@
 
         return time_list
 
-    def _cadence_process(self, lon_acc_roc, ip_dec_roc):
-        if abs(lon_acc_roc) >= abs(ip_dec_roc) or abs(lon_acc_roc) < 1:
-            return np.nan
-        # elif abs(lon_acc_roc) == 0:
-        elif abs(lon_acc_roc) == 0:
-            return 0
-        elif lon_acc_roc > 0 and lon_acc_roc < -ip_dec_roc:
-            return 1
-        elif lon_acc_roc < 0 and lon_acc_roc > ip_dec_roc:
-            return -1
-
     def _cadence_process_new(self, lon_acc, ip_acc, ip_dec):
+        """处理顿挫数据"""
         if abs(lon_acc) < 1 or lon_acc > ip_acc or lon_acc < ip_dec:
             return np.nan
-        # elif abs(lon_acc_roc) == 0:
         elif abs(lon_acc) == 0:
             return 0
         elif lon_acc > 0 and lon_acc < ip_acc:
@@ -370,17 +250,8 @@
             return 0
 
     def _cadence_detector(self):
-        """
-        # 加速度突变:先加后减,先减后加,先加然后停,先减然后停
-        # 顿挫:2s内多次加速度变化率突变
-        # 求出每一个特征点,然后提取,然后将每一个特征点后面的2s做一个窗口,统计频率,避免无效运算
-
-        # 将特征点筛选出来
-        # 将特征点时间作为聚类标准,大于1s的pass,小于等于1s的聚类到一个分组
-        # 去掉小于3个特征点的分组
-        """
-        # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'cadence']].copy()
-        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence']].copy()
+        """检测顿挫事件"""
+        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'cadence', 'v']].copy()
         time_list = data['simTime'].values.tolist()
 
         data = data[data['cadence'] != np.nan]
@@ -411,14 +282,12 @@
         group_time = [g for g in group_time if len(g) >= 1] # 有一次特征点则算作一次顿挫
         group_frame = [g for g in group_frame if len(g) >= 1]
 
-
-        # 将顿挫组的起始时间为组重新统计时间
-
         # 输出图表值
         cadence_time = [[g[0], g[-1]] for g in group_time]
         cadence_frame = [[g[0], g[-1]] for g in group_frame]
 
         if cadence_time:
+            # 保存顿挫事件摘要
             time_df = pd.DataFrame(cadence_time, columns=['start_time', 'end_time'])
             frame_df = pd.DataFrame(cadence_frame, columns=['start_frame', 'end_frame'])
             discomfort_df = pd.concat([time_df, frame_df], axis=1)
@@ -426,40 +295,16 @@
             self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
 
         # 将顿挫组的起始时间为组重新统计时间
-        cadence_time_list = [time for pair in cadence_time for time in time_list if pair[0] <= time <= pair[1]]
-
-        # time_list = [element for sublist in group_time for element in sublist]
-        # merged_list = [element for sublist in res_group for element in sublist]
-        # res_df = data[data['simTime'].isin(merged_list)]
-
-        stre_list = []
-        freq_list = []
-        for g in group_time:
-            # calculate strength
-            g_df = data[data['simTime'].isin(g)]
-            strength = g_df['lon_acc'].abs().mean()
-            stre_list.append(strength)
-
-            # calculate frequency
-            cnt = len(g)
-            t_start = g_df['simTime'].iloc[0]
-            t_end = g_df['simTime'].iloc[-1]
-            t_delta = t_end - t_start
-            frequency = cnt / t_delta
-            freq_list.append(frequency)
-
-        self.cadence_count = len(freq_list)
-        cadence_stre = sum(stre_list) / len(stre_list) if stre_list else 0
+        cadence_time_list = [time for pair in cadence_time for time in self.ego_df['simTime'].values if pair[0] <= time <= pair[1]]
+
+        self.cadence_count = len(cadence_time)
 
         return cadence_time_list
 
     def _slam_brake_detector(self):
-        # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
-        # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_dec_roc', 'slam_brake']].copy()
-        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake']].copy()
-        # data['slam_diff'] = data['slam_brake'].diff()
-        # res_df = data[data['slam_diff'] == 1]
-
+        """检测急刹车事件"""
+        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'lon_acc_roc', 'ip_dec', 'slam_brake', 'v']].copy()
+
         res_df = data[data['slam_brake'] == 1]
         t_list = res_df['simTime'].values
         f_list = res_df['simFrame'].values.tolist()
@@ -489,6 +334,7 @@
         slam_brake_frame = [[g[0], g[-1]] for g in group_frame]
 
         if slam_brake_time:
+            # 保存事件摘要
             time_df = pd.DataFrame(slam_brake_time, columns=['start_time', 'end_time'])
             frame_df = pd.DataFrame(slam_brake_frame, columns=['start_frame', 'end_frame'])
             discomfort_df = pd.concat([time_df, frame_df], axis=1)
@@ -496,16 +342,13 @@
             self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
 
         time_list = [element for sublist in group_time for element in sublist]
-        self.slam_brake_count = len(group_time) # / self.mileage # * 1000000
+        self.slam_brake_count = len(group_time)
         return time_list
 
     def _slam_accel_detector(self):
-        # 统计急刹全为1的分段的个数,记录分段开头的frame_ID
-        # data = self.ego_df[['simTime', 'simFrame', 'lon_acc_roc', 'ip_acc_roc', 'slam_accel']].copy()
-        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel']].copy()
-        # data['slam_diff'] = data['slam_accel'].diff()
-        # res_df = data.loc[data['slam_diff'] == 1]
-
+        """检测急加速事件"""
+        data = self.ego_df[['simTime', 'simFrame', 'lon_acc', 'ip_acc', 'slam_accel', 'v']].copy()
+
         res_df = data.loc[data['slam_accel'] == 1]
         t_list = res_df['simTime'].values
         f_list = res_df['simFrame'].values.tolist()
@@ -529,12 +372,12 @@
         group_time = [g for g in group_time if len(g) >= 2]
         group_frame = [g for g in group_frame if len(g) >= 2]
 
-
         # 输出图表值
         slam_accel_time = [[g[0], g[-1]] for g in group_time]
         slam_accel_frame = [[g[0], g[-1]] for g in group_frame]
 
         if slam_accel_time:
+            # 保存事件摘要
             time_df = pd.DataFrame(slam_accel_time, columns=['start_time', 'end_time'])
             frame_df = pd.DataFrame(slam_accel_frame, columns=['start_frame', 'end_frame'])
             discomfort_df = pd.concat([time_df, frame_df], axis=1)
@@ -542,22 +385,21 @@
             self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
 
         time_list = [element for sublist in group_time for element in sublist]
-        self.slam_accel_count = len(group_time) # / self.mileage # * 1000000
+        self.slam_accel_count = len(group_time)
         return time_list
-
+
     def comf_statistic(self):
-
-        df = self.ego_df[['simTime', 'cur_diff', 'lon_acc', 'lon_acc_roc', 'accelH']].copy()
+        """统计舒适性指标"""
+        df = self.ego_df[['simTime', 'simFrame', 'cur_diff', 'lon_acc', 'lon_acc_roc', 'accelH', 'speedH', 'lat_acc', 'v']].copy()
 
         self.zigzag_count_func()
         self.cal_zigzag_strength_strength()
         if self.zigzag_time_list:
+            # 保存 Weaving (zigzag) 事件摘要
             zigzag_df = pd.DataFrame(self.zigzag_time_list, columns=['start_time', 'end_time'])
             zigzag_df = get_frame_with_time(zigzag_df, self.ego_df)
             zigzag_df['type'] = 'zigzag'
             self.discomfort_df = pd.concat([self.discomfort_df, zigzag_df], ignore_index=True)
-            # discomfort_df = pd.concat([time_df, frame_df], axis=1)
-            # self.discomfort_df = pd.concat([self.discomfort_df, discomfort_df], ignore_index=True)
 
         zigzag_t_list = []
         # 只有[t_start, t_end]数对,要提取为完整time list
@@ -572,25 +414,7 @@
         slam_brake_t_list = self._slam_brake_detector()
         slam_accel_t_list = self._slam_accel_detector()
 
-
-
-        discomfort_time_list = zigzag_t_list + shake_t_list + cadence_t_list + slam_brake_t_list + slam_accel_t_list
-        discomfort_time_list = sorted(discomfort_time_list) # 排序
-        discomfort_time_list = list(set(discomfort_time_list)) # 去重
-
-        # TIME_DIFF = self.time_list[3] - self.time_list[2]
-        # TIME_DIFF = 0.4
-        FREQUENCY = 100
-        TIME_DIFF = 1 / FREQUENCY
-        self.discomfort_duration = len(discomfort_time_list) * TIME_DIFF
-
-        df['flag_zigzag'] = df['simTime'].apply(lambda x: 1 if x in zigzag_t_list else 0)
-        df['flag_shake'] = df['simTime'].apply(lambda x: 1 if x in shake_t_list else 0)
-        df['flag_cadence'] = df['simTime'].apply(lambda x: 1 if x in cadence_t_list else 0)
-        df['flag_slam_brake'] = df['simTime'].apply(lambda x: 1 if x in slam_brake_t_list else 0)
-        df['flag_slam_accel'] = df['simTime'].apply(lambda x: 1 if x in slam_accel_t_list else 0)
-
-
+        # 统计结果
         self.calculated_value = {
             "weaving": self.zigzag_count,
             "shake": self.shake_count,
@@ -601,38 +425,10 @@
         self.logger.info(f"舒适性计算完成,统计结果:{self.calculated_value}")
         return self.calculated_value
 
-    def _nan_detect(self, num):
-        if math.isnan(num):
-            return 0
-        return num
-
-    def zip_time_pairs(self, zip_list):
-        zip_time_pairs = zip(self.time_list, zip_list)
-        zip_vs_time = [[x, "" if math.isnan(y) else y] for x, y in zip_time_pairs]
-        return zip_vs_time
-
     def report_statistic(self):
+        """生成舒适性评估报告"""
         comfort_result = self.comf_statistic()
-
-        # comfort_config_path = self.config_path / "comfort_config.yaml" #"comfort_config.yaml" # "comfort_config.yaml"
         evaluator = Score(self.data_processed.comfort_config)
         result = evaluator.evaluate(comfort_result)
         print("\n[舒适性表现及得分情况]")
-
         return result
-
-if __name__ == '__main__':
-    case_name = 'ICA'
-    mode_label = 'PGVIL'
-
-    data = data_process.DataPreprocessing(case_name, mode_label)
-
-
-    comfort_instance = Comfort(data)
-    # 调用实例方法 report_statistic,它不接受除 self 之外的参数
-    try:
-        comfort_result = comfort_instance.report_statistic()
-        result = {'comfort': comfort_result}
-
-    except Exception as e:
-        print(f"An error occurred in Comfort.report_statistic: {e}")
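With the `if __name__ == '__main__'` block and the `modules.lib.data_process` import removed, the module becomes import-only: a caller constructs `Comfort` with an already-preprocessed data object and asks it for the report. A minimal calling sketch under the assumptions visible in the diff (the data object must expose `ego_data` and `comfort_config`; the driver function itself is illustrative, not part of the module):

    def run_comfort(data_processed):
        # Illustrative driver only; 'data_processed' comes from the project's
        # preprocessing pipeline and must provide ego_data plus comfort_config.
        comfort = Comfort(data_processed)
        return {'comfort': comfort.report_statistic()}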