# -*- coding: utf-8 -*-
"""Continuously scan an OSS bucket for fully uploaded rosbag directories,
merge paired node1/node2 recordings into one full bag, split that bag into
per-subsystem bags (camera / fusion / plan / control), upload the results,
then delete both the local temporaries and the original OSS objects.
"""
import os
import time
import oss2
from rosbag import Bag, Compression
import subprocess  # NOTE(review): unused here; kept in case another chunk of the file needs it

# OSS key prefixes for each pipeline stage.
key1 = 'kinglong/'    # bucket root for this fleet
key2 = 'data/'        # raw per-node uploads
key3 = 'data_merge/'  # merged full bags
key4 = 'data_parse/'  # per-subsystem split bags
path1 = '/root/'      # local working directory root
sleep_time = 5        # 每多少秒扫描一次 (seconds between bucket scans)
compress_way = Compression.BZ2

# Topic groups used to split the merged bag. A topic may belong to more than
# one group (e.g. '/cicv_location' appears in both fusion and plan).
camera_topics = ['/camera_image', '/cam_res']
fusion_topics = ['/fusion/vis/velocity', '/cicv/lidartracking_moving_objects',
                 '/cicv_location', '/cicv/lidarfusionmovingobject',
                 '/cicv/lidardeeplearning_moving_objects',
                 '/cicv/lidarcluster_moving_objects', '/cam_objects',
                 '/points_concat', '/f_radar_objects', '/fusion/vis/box']
# NOTE(review): '/vehicle_info ' below has a trailing space; it will never
# match the real '/vehicle_info' topic — confirm whether that is intended.
plan_topics = ['/cicv_amr_trajectory', '/cicv_location', '/map_polygon',
               '/reference_trajectory', '/tpperception', '/tprouteplan',
               '/jinlong_control_pub', '/vehicle_info ', '/tftrafficlight']
control_topics = ['/cicv_location_JL', '/jinlong_flag_pub', '/car_wheel',
                  '/nodefault_info', '/AutoModeStatus', '/vehicle_info']


def is_upload_completed(bucket, prefix):
    """Return True when the directory at *prefix* contains exactly as many
    objects as the count encoded in its name.

    Directory names end with '_<expected-count>/' (e.g.
    'node1_2023-12-20-02-16-56_obction_10/'), so the expected count is the
    last '_'-separated token with the trailing '/' stripped.
    """
    target_number = str(prefix).split('_')[-1][:-1]
    count = 0
    for obj in oss2.ObjectIterator(bucket, prefix=prefix):
        if obj.key != prefix:  # skip the directory placeholder object itself
            count += 1
    return int(count) == int(target_number)


def merge(local_bags, merged_prefix, local_merged_dir, merged_bag_name):
    """Merge *local_bags* into one full bag plus four topic-filtered bags,
    upload all five to OSS, and return their local file paths.

    Each input bag is read exactly once; every message goes into the full
    bag and into whichever topic-group bags match (the previous version
    re-read every input bag once per output bag — five passes).

    Parameters
    ----------
    local_bags : list of local .bag file paths to merge (in order).
    merged_prefix : OSS prefix (under data_merge/) for the full bag.
    local_merged_dir : local directory where output bags are written.
    merged_bag_name : file name of the merged full bag.

    Returns
    -------
    (all, camera, fusion, plan, control) local paths, or (None,) * 5 on
    failure so the caller can always unpack the result.
    """
    try:
        parse_prefix = str(merged_prefix).replace('data_merge', 'data_parse')
        all_bag_path = local_merged_dir + merged_bag_name
        camera_bag_path = local_merged_dir + 'camera.bag'
        fusion_bag_path = local_merged_dir + 'fusion.bag'
        plan_bag_path = local_merged_dir + 'plan.bag'
        control_bag_path = local_merged_dir + 'control.bag'
        # (output path, OSS key, topic membership set) per filtered bag;
        # frozenset gives O(1) membership tests in the hot message loop.
        filtered = [
            (camera_bag_path, parse_prefix + 'camera.bag', frozenset(camera_topics)),
            (fusion_bag_path, parse_prefix + 'fusion.bag', frozenset(fusion_topics)),
            (plan_bag_path, parse_prefix + 'plan.bag', frozenset(plan_topics)),
            (control_bag_path, parse_prefix + 'control.bag', frozenset(control_topics)),
        ]
        all_bag = Bag(all_bag_path, 'w', compression=compress_way)
        group_bags = [(Bag(p, 'w', compression=compress_way), topics)
                      for p, _, topics in filtered]
        try:
            # Single pass over every input message, fanned out to outputs.
            for local_bag in local_bags:
                with Bag(local_bag, 'r') as ib:
                    for topic, msg, t in ib:
                        all_bag.write(topic, msg, t)
                        for out_bag, topics in group_bags:
                            if topic in topics:
                                out_bag.write(topic, msg, t)
        finally:
            # Close explicitly so the bags are flushed before uploading.
            all_bag.close()
            for out_bag, _ in group_bags:
                out_bag.close()
        bucket.put_object_from_file(merged_prefix + merged_bag_name, all_bag_path)
        for local_path, oss_key, _ in filtered:
            bucket.put_object_from_file(oss_key, local_path)
        return all_bag_path, camera_bag_path, fusion_bag_path, plan_bag_path, control_bag_path
    except Exception as e:
        # BUGFIX: previously this path fell through returning None, and the
        # caller's 5-way unpack raised TypeError, hiding the real error.
        print('bag包合并报错: %s' % str(e))
        return None, None, None, None, None


'''
cname:http://open-bucket.oss.icvdc.com
内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
oss桶名: open-bucket
keyid:n8glvFGS25MrLY7j
secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
'''
# ------- 获取未合并的bag包,合并 -------
if __name__ == '__main__':
    # 1 Connect to OSS.
    # SECURITY(review): credentials are hard-coded here (and in the comment
    # above) — move them to environment variables or a secrets store.
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    # cname = 'http://open-bucket.oss.icvdc.com'
    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
    turn = 1
    while True:
        # 2 Collect directories whose upload has finished, grouped by node.
        upload_completed_prefix_list_node1 = []
        upload_completed_prefix_list_node2 = []
        for obj1 in oss2.ObjectIterator(bucket, prefix=key1, delimiter='/'):
            if str(obj1.key).count('/') == 2:  # e.g. kinglong/robot-01/
                for obj2 in oss2.ObjectIterator(bucket, prefix=str(obj1.key) + key2, delimiter='/'):
                    # e.g. kinglong/robot-01/data/node1_2023-12-20-02-16-56_obction_10/
                    if str(obj2.key).count('/') == 4 and is_upload_completed(bucket, obj2.key):
                        if 'node1' in obj2.key:
                            upload_completed_prefix_list_node1.append(obj2.key)
                        if 'node2' in obj2.key:
                            upload_completed_prefix_list_node2.append(obj2.key)
        # 3 Process a recording only when BOTH nodes have finished uploading.
        for prefix_node1 in upload_completed_prefix_list_node1:
            prefix_node2 = prefix_node1.replace('node1', 'node2')
            if prefix_node2 not in upload_completed_prefix_list_node2:
                continue
            local_delete_list = []
            oss_delete_list = []
            # Collect the .bag objects of both nodes.
            oss_bags = []
            local_bags = []
            for node_prefix in (prefix_node1, prefix_node2):
                for obj in oss2.ObjectIterator(bucket, prefix=node_prefix):
                    if str(obj.key).endswith('.bag'):
                        oss_bags.append(str(obj.key))
            # Derive merge/parse OSS prefixes and matching local directories.
            split_list = prefix_node1.replace(key2, key3).replace('node1_', '').split('/')
            merged_prefix = '/'.join(split_list[0:-2]) + '/'
            local_merged_dir = path1 + merged_prefix
            if not os.path.exists(local_merged_dir):
                os.makedirs(local_merged_dir)
            parse_prefix = merged_prefix.replace(key3, key4)
            local_parse_dir = path1 + parse_prefix
            if not os.path.exists(local_parse_dir):
                os.makedirs(local_parse_dir)
            merged_bag_name = split_list[-2]
            parse_prefix = parse_prefix + merged_bag_name + '/'
            merged_bag_full_name = merged_bag_name + '.bag'
            local_bag_dir = path1 + '/'.join(split_list[0:-1]) + '/'
            if not os.path.exists(local_bag_dir):
                os.makedirs(local_bag_dir)
            # Download the raw bags.
            for oss_bag in oss_bags:
                bag_name = str(oss_bag).split('/')[-1]
                local_bag_path = local_bag_dir + bag_name
                bucket.get_object_to_file(oss_bag, local_bag_path)
                local_bags.append(local_bag_path)
                local_delete_list.append(local_bag_path)
                oss_delete_list.append(oss_bag)
            # 4 Merge, split and upload; returned paths are local temporaries.
            # merge() yields (None,)*5 on failure, which we filter out here
            # instead of crashing on a 5-way unpack of None.
            merged_paths = merge(local_bags, merged_prefix, local_merged_dir,
                                 merged_bag_full_name)
            local_delete_list.extend(p for p in merged_paths if p is not None)
            # 5 Delete local temporary files.
            for local_delete in local_delete_list:
                try:
                    os.remove(local_delete)
                except Exception as e:
                    print('删除本地临时文件: %s' % str(e))
            # 6 Delete the processed OSS originals.
            # NOTE(review): batch_delete_objects accepts at most 1000 keys
            # per call — confirm recordings never exceed that.
            if len(oss_delete_list) > 0:
                bucket.batch_delete_objects(oss_delete_list)
        turn = turn + 1
        time.sleep(sleep_time)