# -*- coding: utf-8 -*-
import os
import time
import oss2
from rosbag import Bag, Compression
import subprocess
import logging
import rosbag
from std_msgs.msg import Header
import rospy

path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot/'
logging.basicConfig(filename=path1 + 'log/merge-pjibot_guide.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')
key1 = 'pjibot/'
key2 = 'data/'
key3 = 'data_merge/'
key4 = 'data_parse/'
sleep_time = 30  # scan interval in seconds
compress_way = Compression.BZ2
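
# Layout of the OSS keys this script works with (inferred from the prefix handling
# below; the concrete robot/task names are only illustrative):
#   pjibot/<robot>/data/<stamp>_<task>_<N>/*.bag      raw uploads, N = expected bag count
#   pjibot/<robot>/data_merge/<stamp>_<task>_<N>.bag  merged bag written by merge()
#   pjibot/<robot>/data_parse/<stamp>_<task>_<N>/     build_map.bag, track.png and copies of
#                                                     the single-frame tfstatic/map/costmap bags
#   pjibot/<robot>/tfstatic.bag|map.bag|costmap.bag   single-frame bags fetched by merge()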


def merge(local_bags, merged_prefix, local_merged_dir, merged_bag_name):
    # Merge the downloaded bag files into one bag, re-stamp /tf_static, render a
    # trajectory image and upload the results. Relies on the module-level `bucket`
    # created in the __main__ block below.
    try:
        parse_prefix = merged_prefix.replace('data_merge', 'data_parse') + merged_bag_name.split('.')[0] + '/'
        output_bag_file = local_merged_dir + merged_bag_name
        with Bag(output_bag_file, 'w', compression=compress_way) as o:
            for i in range(len(local_bags)):
                with Bag(local_bags[i], 'r') as ib:
                    for topic, msg, t in ib:
                        o.write(topic, msg, t)
        bucket.put_object_from_file(merged_prefix + merged_bag_name, output_bag_file)
        # fetch the single-frame tf_static bag
        output_bag_file2 = local_merged_dir + merged_bag_name + '.merge'
        tfstatic_key = merged_prefix.replace('data_merge/', '') + 'tfstatic.bag'
        local_tfstatic_path = local_merged_dir + 'tfstatic.bag'
        logging.info("key of tfstatic.bag: %s" % str(tfstatic_key))
        bucket.get_object_to_file(tfstatic_key, local_tfstatic_path)
        bag_a = rosbag.Bag(output_bag_file)
        bag_b = rosbag.Bag(local_merged_dir + 'tfstatic.bag')
        # compression_a = bag_a.get_compression()
        tf_msg = None
        for topic, msg, t in bag_b.read_messages(topics=['/tf_static']):
            tf_msg = msg
        if tf_msg:
            first_timestamp = bag_a.get_start_time()
            print(first_timestamp)
            header = Header()
            header.stamp = rospy.Time.from_sec(first_timestamp)
            for transform in tf_msg.transforms:
                transform.header.stamp = header.stamp
            # rewrite the merged bag and append the re-stamped /tf_static message
            with rosbag.Bag(output_bag_file2, 'w') as outbag:
                for topic, msg, t in bag_a.read_messages():
                    outbag.write(topic, msg, t=rospy.Time.from_sec(t.to_sec()))
                outbag.write('/tf_static', tf_msg, t=header.stamp)
        bag_a.close()
        bag_b.close()
        build_map_key = parse_prefix + 'build_map.bag'
        bucket.put_object_from_file(build_map_key, output_bag_file2)
        # generate the trajectory image from the merged bag and map.bag
        # fetch the single-frame map bag
        map_key = merged_prefix.replace('data_merge/', '') + 'map.bag'
        local_map_path = local_merged_dir + 'map.bag'
        logging.info("key of map.bag: %s" % str(map_key))
        bucket.get_object_to_file(map_key, local_map_path)
        jpg_file = local_merged_dir + merged_bag_name.split('.')[0] + '.jpg'
        # build the command
        command1 = ['rosrun', 'trajectory', 'demo_node', local_map_path, output_bag_file, jpg_file]
        # log the command
        logging.info("trajectory image generation command: %s" % ' '.join(command1))
        # run the command with subprocess.run and capture stdout/stderr
        result = subprocess.run(command1, capture_output=True, text=True)
        # log the command's standard output
        if result.stdout:
            logging.info("command stdout: %s" % result.stdout.strip())
        # log the command's standard error
        if result.stderr:
            logging.error("command stderr: %s" % result.stderr.strip())
        # check the return code
        if result.returncode != 0:
            logging.error("command failed, return code: %d" % result.returncode)
        else:
            logging.info("command succeeded")
        jpg_key = parse_prefix + 'track.png'
        bucket.put_object_from_file(jpg_key, jpg_file)
        costmap_key = merged_prefix.replace('data_merge/', '') + 'costmap.bag'
        logging.info("key of costmap.bag: %s" % str(costmap_key))
        local_costmap_path = local_merged_dir + 'costmap.bag'
        bucket.get_object_to_file(costmap_key, local_costmap_path)
        # copy the three single-frame bags to the parsed-data directory as well
        bucket.put_object_from_file(parse_prefix + 'tfstatic.bag', local_tfstatic_path)
        bucket.put_object_from_file(parse_prefix + 'map.bag', local_map_path)
        bucket.put_object_from_file(parse_prefix + 'costmap.bag', local_costmap_path)
        return output_bag_file, output_bag_file2, jpg_file
    except Exception as e:
        logging.exception("error while merging bags: %s" % str(e))
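
# A minimal usage sketch for merge() (hypothetical paths and names, for illustration
# only; in this script it is only called from the scan loop below, and it returns
# None if an exception is caught):
#   merged_file, build_map_file, jpg = merge(
#       ['/tmp/part1.bag', '/tmp/part2.bag'],           # locally downloaded bag parts
#       'pjibot/robot-01/data_merge/',                  # OSS prefix for the merged bag
#       path1 + 'pjibot/robot-01/data_merge/',          # matching local directory
#       '2023-12-20-02-16-56_obstacledetection_2.bag')  # merged bag file name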

'''
cname: http://pji-bucket1.oss.icvdc.com
internal endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
keyid: n8glvFGS25MrLY7j
secret: xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
OSS bucket name: pji-bucket1
OSS bucket path: oss://pji-bucket1
'''
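
# If access has to go through the public CNAME listed above instead of the internal
# endpoint, oss2 supports that via is_cname=True (sketch only, not used below):
#   bucket = oss2.Bucket(auth, 'http://pji-bucket1.oss.icvdc.com', 'pji-bucket1', is_cname=True)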

# ------- find bags that have not been merged yet and merge them -------
if __name__ == '__main__':
    # 1 create the Aliyun OSS client
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    # cname = 'http://open-bucket.oss.icvdc.com'
    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
    while True:
        logging.info("starting a new scan round")
        try:
            # print(f'scan round {turn} started.')
            prefix_list = []
            # 2 collect all directories whose upload has completed, grouped by prefix
            upload_completed_prefix_list = []
            for obj1 in oss2.ObjectIterator(bucket, prefix=key1, delimiter='/'):
                bag_list = []
                target_path = ''
                if str(obj1.key).count('/') == 2:  # e.g. pjibot/robot-01/
                    # 6 verify the number of bag files
                    for obj2 in oss2.ObjectIterator(bucket, prefix=str(obj1.key) + key2, delimiter='/'):
                        # e.g. pjibot/robot-01/data/2023-12-20-02-16-56_obstacledetection_10/
                        if str(obj2.key).count('/') == 4:
                            # the trailing number in the directory name is the expected bag count
                            bag_need = str(obj2.key).split('_')[-1][:-1]
                            count_bag = 0
                            for obj3 in oss2.ObjectIterator(bucket, prefix=str(obj2.key)):
                                if obj3.key[-3:] == 'bag':
                                    count_bag = count_bag + 1
                            if bag_need == str(count_bag):
                                upload_completed_prefix_list.append(obj2.key)
            # process each completed directory
            for prefix in upload_completed_prefix_list:
                local_delete_list = []
                oss_delete_list = []
                # collect the bags to process
                oss_bags = []
                local_bags = []
                for obj in oss2.ObjectIterator(bucket, prefix=prefix):
                    if str(obj.key).endswith('.bag'):
                        oss_bags.append(str(obj.key))
                split_list = prefix.replace(key2, key3).split('/')
                merged_prefix = '/'.join(split_list[0:-2]) + '/'
                local_merged_dir = path1 + merged_prefix
                if not os.path.exists(local_merged_dir):
                    os.makedirs(local_merged_dir)
                merged_bag_name = split_list[-2]
                merged_bag_full_name = merged_bag_name + '.bag'
                local_bag_dir = path1 + '/'.join(split_list[0:-1]) + '/'
                if not os.path.exists(local_bag_dir):
                    os.makedirs(local_bag_dir)
                # download
                for oss_bag in oss_bags:
                    bag_name = str(oss_bag).split('/')[-1]
                    local_bag_path = local_bag_dir + bag_name
                    bucket.get_object_to_file(oss_bag, local_bag_path)
                    local_bags.append(local_bag_path)
                    local_delete_list.append(local_bag_path)
                    oss_delete_list.append(oss_bag)
                # 1 merge the bags
                logging.info("merging files, local merge directory: %s" % str(local_merged_dir))
                merged_bag_file_path, build_map_bag_local_path, jpg_file = merge(
                    local_bags, merged_prefix, local_merged_dir, merged_bag_full_name)
                local_delete_list.append(merged_bag_file_path)
                local_delete_list.append(build_map_bag_local_path)
                local_delete_list.append(jpg_file)
                # delete the local temporary files
                if len(local_delete_list) > 0:
                    for local_delete in local_delete_list:
                        try:
                            os.remove(local_delete)
                        except Exception as e:
                            logging.exception("error deleting local temporary file: %s" % str(e))
                # delete the original files from OSS
                if len(oss_delete_list) > 0:
                    bucket.batch_delete_objects(oss_delete_list)
            time.sleep(sleep_time)
        except Exception as e:
            logging.exception("error in global handler: %s" % str(e))