# -*- coding: utf-8 -*-
"""Poll an Aliyun OSS bucket for fully uploaded rosbag chunk directories,
merge each directory's chunks into a single bag, upload the merged bag,
then delete the local temp files and the OSS originals.

A chunk directory is considered complete when the number of ``.bag``
objects under it equals the trailing ``_N`` suffix of its name, e.g.
``pji/robot-01/data/2023-12-20-02-16-56_obstacledetection_10/`` needs 10.

NOTE(review): written for Python 2 (ROS-era rosbag); the print calls below
use the parenthesised single-argument form so the file also parses on
Python 3.
"""
import os
import time

import oss2
from rosbag import Bag, Compression

# OSS key prefixes describing the bucket layout.
key1 = 'pji/'            # root prefix: pji/<robot-id>/
key2 = 'data/'           # raw per-chunk bag uploads live here
key3 = 'data_merge/'     # merged bags are uploaded under this prefix
key4 = 'data_parse/'     # parse-output prefix (directories pre-created here)
path1 = '/root/'         # local scratch root mirroring the OSS layout
sleep_time = 5           # seconds between bucket scans
compress_way = Compression.BZ2  # compression for the merged output bag


def merge(local_bags, merged_prefix, local_merged_dir, merged_bag_name):
    """Concatenate *local_bags* into one bag file and upload it to OSS.

    Relies on the module-level ``bucket`` object created in ``__main__``.

    :param local_bags: ordered list of local ``.bag`` paths to merge
    :param merged_prefix: OSS key prefix to upload the merged bag under
    :param local_merged_dir: local directory the merged bag is written to
    :param merged_bag_name: file name (with ``.bag`` suffix) of the output
    :return: local path of the merged bag on success, ``None`` on failure
    """
    try:
        output_bag_file = local_merged_dir + merged_bag_name
        with Bag(output_bag_file, 'w', compression=compress_way) as out_bag:
            # Each message keeps its original timestamp, so sequentially
            # rewriting the chunks preserves playback order.
            for local_bag in local_bags:
                with Bag(local_bag, 'r') as in_bag:
                    for topic, msg, t in in_bag:
                        out_bag.write(topic, msg, t)
        bucket.put_object_from_file(merged_prefix + merged_bag_name,
                                    output_bag_file)
        return output_bag_file
    except Exception as e:
        print("bag包合并报错: %s" % str(e))
        return None  # explicit: callers must treat the merge as failed


'''
cname:http://open-bucket.oss.icvdc.com
内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
oss桶名: open-bucket
keyid:n8glvFGS25MrLY7j
secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
'''
# ------- scan for un-merged bag directories and merge them -------
if __name__ == '__main__':
    # 1. Build the OSS client.
    # SECURITY(review): credentials are hard-coded in source; move them to
    # environment variables or an external config outside version control.
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    # cname = 'http://open-bucket.oss.icvdc.com'
    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
    turn = 1
    while True:
        # 2. Find chunk directories whose upload is complete: the count of
        # .bag objects equals the "_N" suffix of the directory name.
        upload_completed_prefix_list = []
        for obj1 in oss2.ObjectIterator(bucket, prefix=key1, delimiter='/'):
            if str(obj1.key).count('/') != 2:  # expect pji/<robot-id>/
                continue
            for obj2 in oss2.ObjectIterator(bucket, prefix=str(obj1.key) + key2, delimiter='/'):
                # expect pji/<robot-id>/data/<run-dir>_N/
                if str(obj2.key).count('/') != 4:
                    continue
                # Expected bag count: trailing "_N/" of the directory name.
                bag_need = str(obj2.key).split('_')[-1][:-1]
                count_bag = 0
                for obj3 in oss2.ObjectIterator(bucket, prefix=str(obj2.key)):
                    # Was: obj3.key[-3:] == 'bag' — that also matched keys
                    # merely ending in the letters "bag"; require ".bag".
                    if str(obj3.key).endswith('.bag'):
                        count_bag += 1
                if bag_need == str(count_bag):
                    upload_completed_prefix_list.append(obj2.key)

        # 3. Download, merge, upload, and clean up each completed directory.
        for prefix in upload_completed_prefix_list:
            local_delete_list = []
            oss_delete_list = []
            # Collect the source .bag keys for this directory.
            oss_bags = []
            local_bags = []
            for obj in oss2.ObjectIterator(bucket, prefix=prefix):
                if str(obj.key).endswith('.bag'):
                    oss_bags.append(str(obj.key))
            # Derive the merge/parse prefixes from the source prefix, e.g.
            # pji/r1/data/run_10/ -> pji/r1/data_merge/ and pji/r1/data_parse/.
            split_list = prefix.replace(key2, key3).split('/')
            merged_prefix = '/'.join(split_list[0:-2]) + '/'
            local_merged_dir = path1 + merged_prefix
            print('合并文件上传key前缀为: %s' % merged_prefix)
            if not os.path.exists(local_merged_dir):
                os.makedirs(local_merged_dir)
            parse_prefix = ('/'.join(split_list[0:-2]) + '/').replace(key3, key4)
            local_parse_dir = path1 + parse_prefix
            print('解析文件上传key前缀为: %s' % parse_prefix)
            if not os.path.exists(local_parse_dir):
                os.makedirs(local_parse_dir)
            merged_bag_name = split_list[-2]
            parse_prefix = parse_prefix + merged_bag_name + '/'
            merged_bag_full_name = merged_bag_name + '.bag'
            local_bag_dir = path1 + '/'.join(split_list[0:-1]) + '/'
            if not os.path.exists(local_bag_dir):
                os.makedirs(local_bag_dir)
            # Download every source bag to the local scratch directory.
            for oss_bag in oss_bags:
                bag_name = str(oss_bag).split('/')[-1]
                local_bag_path = local_bag_dir + bag_name
                bucket.get_object_to_file(oss_bag, local_bag_path)
                local_bags.append(local_bag_path)
                local_delete_list.append(local_bag_path)
                oss_delete_list.append(oss_bag)
            # Merge and upload; merge() returns None on failure.
            merged_bag_file_path = merge(local_bags, merged_prefix,
                                         local_merged_dir, merged_bag_full_name)
            # Was: unconditional append — a failed merge put None in the
            # delete list and triggered a pointless os.remove(None).
            if merged_bag_file_path is not None:
                local_delete_list.append(merged_bag_file_path)
            # Remove local temp files (best effort, per file). On a failed
            # merge this discards the downloads; the next scan retries.
            for local_delete in local_delete_list:
                try:
                    os.remove(local_delete)
                except Exception as e:
                    print("删除本地临时文件: %s" % str(e))
            # Remove the originals from OSS ONLY after a successful merge.
            # Was: deleted unconditionally — a failed merge combined with
            # this delete permanently lost the source bags.
            if merged_bag_file_path is not None and oss_delete_list:
                bucket.batch_delete_objects(oss_delete_list)
        turn = turn + 1
        time.sleep(sleep_time)