@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+import os
+import shutil
+import subprocess
+import time
+import oss2
+
+from resource import bagtocsv_robot
+
+import logging
+
+path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
+path2 = '/mnt/disk001/pdf_outdoor/run/'
+path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv/'
+logging.basicConfig(filename=path1 + 'log/2csv.log', level=logging.INFO,
+                    format='%(asctime)s - %(levelname)s - %(message)s')
+
+key1 = 'pjibot_delivery/'
+sleep_time = 30  # seconds between scans
+
+
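+# Parse one merged bag into the four csv files plus a pdf report, upload them
+# to OSS under parse_prefix, and queue every local copy for later deletion.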
+def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
+    try:
+        bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
+        bagname = data_bag.split('/')[-1].split('.')[0]
+        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # directory that will hold the four generated csv files
+
+        csv_file_name1 = 'trajectory_pji'
+        local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
+        oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
+        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+
+        csv_file_name2 = 'ego_pji'
+        local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
+        oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
+        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+
+        csv_file_name3 = 'pos_pji'
+        local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
+        oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
+        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+
+        csv_file_name4 = 'objects_pji'
+        local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
+        oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
+        bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
+
+        # Generate the pdf report
+        try:
+            os.chdir(path2)
+            # Build the command
+            command1 = ['./pji_outdoor_real',
+                        os.path.join(local_csv_dir, ''),  # note: the trailing '/' may not be needed, depending on what the program expects
+                        os.path.join(local_csv_dir, ''),  # same as above
+                        os.path.join(local_csv_dir, 'trajectory.png'),
+                        bagname]
+
+            # Log the command being invoked
+            logging.info("Invoking pdf report command: %s" % ' '.join(command1))
+
+            # Run the command with subprocess.Popen
+            process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            stdout, stderr = process.communicate()  # wait for the command to finish
+
+            # Check for output or errors
+            if stdout:
+                logging.info("Command stdout:")
+                logging.info(stdout.decode('utf-8'))  # needs decoding under Python 2
+            if stderr:
+                logging.error("Command stderr:")
+                logging.error(stderr.decode('utf-8'))  # needs decoding under Python 2
+
+            # Check whether the command succeeded
+            if process.returncode == 0:
+                logging.info("Command executed successfully")
+            else:
+                logging.error("Command failed, exit code: %s" % process.returncode)
+
+        except OSError as e:
+            # e.g. the directory change failed or the command does not exist
+            logging.error("Error during execution: %s" % e)
+
+        oss_csv_object_key5 = parse_prefix + 'report.pdf'
+        bucket.put_object_from_file(oss_csv_object_key5, str(local_csv_dir) + '/report.pdf')
+        logging.info("pdf report generated and uploaded.")
+
+        # Remember to delete these later
+        local_delete_list.append(local_csv_file_path1)
+        local_delete_list.append(local_csv_file_path2)
+        local_delete_list.append(local_csv_file_path3)
+        local_delete_list.append(local_csv_file_path4)
+        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
+
+    except Exception as e2:
+        # Runs when an exception occurs
+        logging.exception("csv generation failed: %s", e2)
+
+
+# ------- Fetch the merged bag files and parse them into csv -------
+if __name__ == '__main__':
+    # 1 Create the Aliyun OSS client
+    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
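+    # bucket is created at module level here, so parse_csv() above reads it as a global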
+    while True:
+        logging.info("Starting a new scan round")
+        try:
+            local_delete_list = []
+            oss_delete_list = []
+            prefix_list = []
+            # 2 List every directory whose upload has finished and group them
+            for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
+                # Pick up the merged bag
+                merged_bag_object_key = str(obj1.key)
+                # print(f'check 1: {merged_bag_object_key}')
+                if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'):
+                    merged_bag_object_key_split = merged_bag_object_key.split('/')
+                    merged_prefix = '/'.join(merged_bag_object_key_split[:-1])
+                    parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
+                    parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'
+                    callback_undone = False
+                    csv1_done = False
+                    csv2_done = False
+                    csv3_done = False
+                    csv4_done = False
+                    pdf_done = False
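+                    # Check which outputs already exist under the parse prefix; bags that are
+                    # already fully processed (or have no callback.json) are skipped below.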
+                    for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
+                        if '/callback.json' in str(obj2.key):
+                            callback_undone = True
+                        if '/trajectory_pji.csv' in str(obj2.key):
+                            csv1_done = True
+                        if '/ego_pji.csv' in str(obj2.key):
+                            csv2_done = True
+                        if '/pos_pji.csv' in str(obj2.key):
+                            csv3_done = True
+                        if '/objects_pji.csv' in str(obj2.key):
+                            csv4_done = True
+                        if '/report.pdf' in str(obj2.key):
+                            pdf_done = True
+                    if not callback_undone:  # no callback.json yet
+                        continue
+                    if csv1_done and csv2_done and csv3_done and csv4_done and pdf_done:
+                        continue
+
+ logging.info("开始生成场景还原csv: %s" % str(obj1.key))
|
|
|
|
+ local_merged_bag_path = path3 + merged_bag_object_key
|
|
|
|
+ local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
|
|
|
|
+ local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
|
|
|
|
+ if not os.path.exists(local_merged_dir):
|
|
|
|
+ os.makedirs(local_merged_dir)
|
|
|
|
+ if not os.path.exists(local_parse_dir):
|
|
|
|
+ os.makedirs(local_parse_dir)
|
|
|
|
+ merged_bag_full_name = merged_bag_object_key_split[-1]
|
|
|
|
+ merged_bag_name = merged_bag_full_name.split('.')[0]
|
|
|
|
+ try:
|
|
|
|
+ bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
|
|
|
|
+ except Exception as e:
|
|
|
|
+ logging.exception("下载合并后的bag包失败: %s" % str(e))
|
|
|
|
+
|
|
|
|
+ local_delete_list.append(local_merged_bag_path)
|
|
|
|
+ # 2 生成 pos_orig.csv 和 pos_hmi.csv
|
|
|
|
+ parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
|
|
|
|
+
|
|
|
|
+            # Delete the local temporary files
+            if len(local_delete_list) > 0:
+                for local_delete in local_delete_list:
+                    try:
+                        os.remove(local_delete)
+                    except Exception as e:
+                        logging.exception("Caught an exception: %s" % str(e))
+        except Exception as e:
+            logging.exception("Top-level error handler: %s" % str(e))
+        time.sleep(sleep_time)
|