- # -*- coding: utf-8 -*-
- # ------- Global configuration -------
- import sys
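- # Python 2 only: reload(sys) restores setdefaultencoding (removed by site.py) so utf-8 can be set as the default encoding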
- reload(sys)
- sys.setdefaultencoding('utf8')
- import os
- import subprocess
- import time
- import oss2
- import json
- import io
- import logging
- from resource import bagtocsv_robot
- from utils import json_utils
- # Create the Aliyun OSS client (auth, endpoint, bucket)
- auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
- endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
- bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
- path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
- path2 = '/mnt/disk001/pdf_outdoor/run/'
- path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv/'
- logging.basicConfig(filename=path1 + 'log/csv-pjibot_delivery.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
- key1 = 'pjibot_delivery/'
- sleep_time = 30 # scan interval in seconds
- error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv-errorBag.json"
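- # csv-errorBag.json records the data_parse prefixes that failed to parse so they are skipped on later scans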
- # ------- Global configuration -------
-
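- # parse_csv: convert one merged bag into csv files with bagtocsv_robot, merge output.json into the 'check'
- # field of callback.json on OSS, upload the generated csv files and the PDF report to the data_parse
- # prefix, and register local temporary files for cleanup by the caller.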
- def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
- try:
- bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
- bagname = data_bag.split('/')[-1].split('.')[0]
- local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname) # directory that ends up holding the generated csv files and output.json
- # ------- Process output.json - begin -------
- outputs = []
- try:
- output_json_path = str(local_csv_dir)+'/output.json'
- if os.path.exists(output_json_path):
- outputs = json_utils.parse_json_to_string_array(output_json_path)
- # Merge the output.json entries into the 'check' field of callback.json on OSS
- callback_json_oss_key = parse_prefix+'callback.json'
- callback_json_local = local_csv_dir+'/callback.json'
- bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
- with io.open(callback_json_local, 'r', encoding='utf-8') as f:
- data = json.load(f)
- if 'check' not in data:
- data['check'] = []
- data['check'].extend(outputs)
- data['check'] = list(set(data['check'])) # de-duplicate
- json_data = json.dumps(data, ensure_ascii=False, indent=4)
- with io.open(callback_json_local, 'w', encoding='utf-8') as f:
- f.write(unicode(json_data))
- bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
- except Exception as e3:
- logging.exception("Failed to merge output.json into callback.json: %s", e3)
- # ------- Process output.json - end -------
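- # Upload each generated csv to the data_parse prefix on OSS; a missing csv is logged and the prefix is added to the error list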
- csv_file_name1 = 'trajectory_pji'
- local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
- oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
- if os.path.exists(local_csv_file_path1):
- bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
- else:
- logging.error("没有 trajectory_pji.csv")
- json_utils.add_error(parse_prefix,error_bag_json)
- csv_file_name2 = 'ego_pji'
- local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
- oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
- if os.path.exists(local_csv_file_path2):
- bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
- else:
- logging.error("没有 ego_pji.csv")
- json_utils.add_error(parse_prefix,error_bag_json)
- if '目标点缺失' in outputs: # '目标点缺失' = target point missing
- logging.error("output.json reports '目标点缺失' (target point missing); skipping the targetposition.csv upload")
- json_utils.add_error(parse_prefix,error_bag_json)
- else:
- csv_file_name3 = 'targetposition'
- local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
- oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
- if os.path.exists(local_csv_file_path3):
- bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
- else:
- logging.error("没有 targetposition.csv")
- json_utils.add_error(parse_prefix,error_bag_json)
- csv_file_name4 = 'objects_pji'
- local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
- oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
- if os.path.exists(local_csv_file_path4):
- bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
- else:
- logging.error("没有 objects_pji.csv")
- json_utils.add_error(parse_prefix,error_bag_json)
-
- csv_file_name5 = 'drive'
- local_csv_file_path5 = str(local_csv_dir) + '/' + str(csv_file_name5) + '.csv'
- oss_csv_object_key5 = parse_prefix + csv_file_name5 + '.csv'
- if os.path.exists(local_csv_file_path5):
- bucket.put_object_from_file(oss_csv_object_key5, local_csv_file_path5)
- else:
- logging.error("没有 drive.csv")
- json_utils.add_error(parse_prefix,error_bag_json)
- # ------- Generate PDF report - begin -------
- pdf_local_path = str(local_csv_dir) + '/report.pdf'
- can_pdf = True
- for output in outputs:
- if str(output) in ['自车数据缺失', '无规划路径']: # ego data missing / no planned path
- logging.error("'自车数据缺失' (ego data missing) or '无规划路径' (no planned path) reported; cannot generate the evaluation report PDF")
- can_pdf = False
- if can_pdf:
- os.chdir(path2)
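- # pji_outdoor_real is an external report generator; judging from the arguments passed and the paths used below, it is expected to write report.pdf and trajectory.png into the csv directory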
- command1 = ['./pji_outdoor_real',
- os.path.join(local_csv_dir, ''), # note: the trailing '/' may not be required, depending on the tool
- os.path.join(local_csv_dir, ''), # same as above
- os.path.join(local_csv_dir, 'trajectory.png'),
- bagname]
- logging.info("调用生成pdf 报告命令: %s" % ' '.join(command1))
- process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = process.communicate() # wait for the command to finish
- if stdout:
- logging.info("命令的标准输出:")
- logging.info(stdout.decode('utf-8')) # Python 2 中需要解码
- if stderr:
- logging.error("命令的错误输出:")
- logging.error(stderr.decode('utf-8')) # Python 2 中需要解码
- if process.returncode != 0:
- logging.error("命令执行失败,退出码: %s" % process.returncode)
- oss_pdf_object_key = parse_prefix + 'report.pdf'
- bucket.put_object_from_file(oss_pdf_object_key, pdf_local_path)
- logging.info("PDF report generated and uploaded.")
- # ------- Generate PDF report - end -------
-
- # Register the generated local files for cleanup after upload
- local_delete_list.append(local_csv_file_path1)
- local_delete_list.append(local_csv_file_path2)
- local_delete_list.append(local_csv_file_path4)
- local_delete_list.append(local_csv_file_path5)
- local_delete_list.append(output_json_path)
- local_delete_list.append(pdf_local_path)
- local_delete_list.append(str(local_csv_dir) + '/trajectory.png')
- except Exception as e2:
- logging.exception("生成csv报错: %s", e2)
- json_utils.add_error(parse_prefix,error_bag_json)
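- # Main loop: every sleep_time seconds, scan the bucket for merged bags under data_merge whose
- # data_parse outputs are incomplete, skip bags recorded in the error list, download each remaining
- # bag, run parse_csv on it, and finally delete the local temporary files.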
- if __name__ == '__main__':
- while True:
- logging.info("开始新一轮扫描:%s " % key1)
- try:
- local_delete_list = []
- oss_delete_list = []
- prefix_list = []
- # Find all directories whose upload has completed and group them
- for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
- if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'): # bags under data_merge are waiting to be parsed
- # The merged bag object key
- merged_bag_object_key = str(obj1.key)
- merged_bag_object_key_split = merged_bag_object_key.split('/')
- merged_prefix = '/'.join(merged_bag_object_key_split[:-1]) # the data_merge directory
- parse_prefix = merged_prefix.replace('data_merge', 'data_parse') # (not used below; parse_prefix_full carries the full data_parse prefix)
- parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse').replace('.bag', '/') # the data_parse directory
- csv1_done = False
- csv2_done = False
- csv3_done = False
- csv4_done = False
- csv5_done = False
- for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full): # check whether parsed files already exist under data_parse
- if '/trajectory_pji.csv' in str(obj2.key):
- csv1_done = True
- if '/ego_pji.csv' in str(obj2.key):
- csv2_done = True
- if '/targetposition.csv' in str(obj2.key):
- csv3_done = True
- if '/objects_pji.csv' in str(obj2.key):
- csv4_done = True
- if '/drive.csv' in str(obj2.key):
- csv5_done = True
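- # Skip this bag if all five csv outputs already exist under data_parse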
- if csv1_done and csv2_done and csv3_done and csv4_done and csv5_done:
- continue
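- # Skip bags that failed previously (recorded in csv-errorBag.json)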
- error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
- if parse_prefix_full in error_bag_list:
- continue
- logging.info("------- 生成场景还原csv - 开始: %s -------" % str(obj1.key))
- local_merged_bag_path = path3 + merged_bag_object_key
- local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
- local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
- if not os.path.exists(local_merged_dir):
- os.makedirs(local_merged_dir)
- if not os.path.exists(local_parse_dir):
- os.makedirs(local_parse_dir)
- merged_bag_full_name = merged_bag_object_key_split[-1]
- merged_bag_name = merged_bag_full_name.split('.')[0]
- bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
- local_delete_list.append(local_merged_bag_path)
- # Parse the bag into the csv files and the PDF report, and upload them to data_parse
- parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
- logging.info("------- 生成场景还原csv - 结束: %s -------" % str(obj1.key))
- # Remove local temporary files
- if len(local_delete_list) > 0:
- for local_delete in local_delete_list:
- try:
- os.remove(local_delete)
- except Exception:
- pass # the file may already be gone; ignore
- except Exception as e:
- logging.exception("全局错误处理: %s" % str(e))
- time.sleep(sleep_time)
|