merge.py
# -*- coding: utf-8 -*-
import os
import time
import oss2
from rosbag import Bag, Compression
import logging
import rosbag
from std_msgs.msg import Header
import rospy

# Local working root: downloaded bags, merge output and the log file all live here.
path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot/'
logging.basicConfig(filename=path1 + 'log/pjibot_merge.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')
# OSS key prefixes: robot root, raw uploads, merged output, parsed artifacts.
key1 = 'pjibot/'
key2 = 'data/'
key3 = 'data_merge/'
key4 = 'data_parse/'
sleep_time = 5  # seconds to wait between scan rounds
compress_way = Compression.BZ2  # compression used for the merged output bag
  19. def merge(local_bags, merged_prefix, local_merged_dir, merged_bag_name):
  20. try:
  21. parse_prefix = merged_prefix.replace('data_merge', 'data_parse') + merged_bag_name.split('.')[0] + '/'
  22. output_bag_file = local_merged_dir + merged_bag_name
  23. with Bag(output_bag_file, 'w', compression=compress_way) as o:
  24. for i in range(len(local_bags)):
  25. with Bag(local_bags[i], 'r') as ib:
  26. for topic, msg, t in ib:
  27. o.write(topic, msg, t)
  28. bucket.put_object_from_file(merged_prefix + merged_bag_name, output_bag_file)
  29. # 获取只有1帧率的数据包
  30. output_bag_file2 = local_merged_dir + merged_bag_name + '.merge'
  31. tfstatic_key = merged_prefix.replace('data_merge/', '') + 'tfstatic.bag'
  32. local_tfstatic_path = local_merged_dir + 'tfstatic.bag'
  33. logging.info("tfstatic.bag包的key为: %s" % str(tfstatic_key))
  34. bucket.get_object_to_file(tfstatic_key, local_tfstatic_path)
  35. bag_a = rosbag.Bag(output_bag_file)
  36. bag_b = rosbag.Bag(local_merged_dir + 'tfstatic.bag')
  37. # compression_a = bag_a.get_compression()
  38. tf_msg = None
  39. for topic, msg, t in bag_b.read_messages(topics=['/tf_static']):
  40. tf_msg = msg
  41. if tf_msg:
  42. first_timestamp = bag_a.get_start_time()
  43. print(first_timestamp)
  44. header = Header()
  45. header.stamp = rospy.Time.from_sec(first_timestamp)
  46. for transform in tf_msg.transforms:
  47. transform.header.stamp = header.stamp
  48. with rosbag.Bag(output_bag_file2, 'w') as outbag:
  49. for topic, msg, t in bag_a.read_messages():
  50. outbag.write(topic, msg, t=rospy.Time.from_sec(t.to_sec()))
  51. outbag.write('/tf_static', tf_msg, t=header.stamp)
  52. bag_a.close()
  53. bag_b.close()
  54. build_map_key = parse_prefix + 'build_map.bag'
  55. bucket.put_object_from_file(build_map_key, output_bag_file2)
  56. # 根据合并后的包和map.bag 生成轨迹图片
  57. # 获取只有1帧率的数据包
  58. map_key = merged_prefix.replace('data_merge/', '') + 'map.bag'
  59. local_map_path = local_merged_dir + 'map.bag'
  60. logging.info("map.bag包的key为: %s" % str(map_key))
  61. bucket.get_object_to_file(map_key, local_map_path)
  62. jpg_file = local_merged_dir + merged_bag_name.split('.')[0] + '.jpg'
  63. command1 = 'rosrun trajectory demo_node ' + local_map_path + ' ' + output_bag_file + ' ' + jpg_file
  64. logging.info("调用命令轨迹图片生成命令: %s" % str(command1))
  65. os.system(command1)
  66. jpg_key = parse_prefix + 'track.png'
  67. bucket.put_object_from_file(jpg_key, jpg_file)
  68. costmap_key = merged_prefix.replace('data_merge/', '') + 'costmap.bag'
  69. logging.info("costmap.bag包的key为: %s" % str(costmap_key))
  70. local_costmap_path = local_merged_dir + 'costmap.bag'
  71. bucket.get_object_to_file(costmap_key, local_costmap_path)
  72. # 将三个单帧的包复制到指定采集数据目录一份
  73. bucket.put_object_from_file(parse_prefix + 'tfstatic.bag', local_tfstatic_path)
  74. bucket.put_object_from_file(parse_prefix + 'map.bag', local_map_path)
  75. bucket.put_object_from_file(parse_prefix + 'costmap.bag', local_costmap_path)
  76. return output_bag_file, output_bag_file2, jpg_file
  77. except Exception as e:
  78. logging.exception("bag包合并报错: %s" % str(e))
  79. '''
  80. cname:http://pji-bucket1.oss.icvdc.com
  81. 内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
  82. keyid:n8glvFGS25MrLY7j
  83. secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
  84. oss桶名: pji-bucket1
  85. oss桶名: oss://pji-bucket1
  86. '''
# ------- Fetch the not-yet-merged bag directories and merge them -------
if __name__ == '__main__':
    # 1 Create the Aliyun OSS client.
    # NOTE(review): access key id/secret are hard-coded in source; move them to
    # environment variables or a config file kept out of version control.
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    # cname = 'http://open-bucket.oss.icvdc.com'
    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
    turn = 1  # scan-round counter, used only for logging
    while True:
        logging.info("当前轮次: %s" % str(turn))
        turn += 1
        try:
            # print(f'当前轮次{turn}扫描开始。')
            prefix_list = []
            # 2 Find every data directory whose upload has finished.
            upload_completed_prefix_list = []
            for obj1 in oss2.ObjectIterator(bucket, prefix=key1, delimiter='/'):
                bag_list = []
                target_path = ''
                if str(obj1.key).count('/') == 2:  # pujin/robot-01/
                    # 6 Verify the bag count: the directory name ends in the expected
                    # number of bags (…_<n>/); the upload is complete only when exactly
                    # that many .bag objects exist under the prefix.
                    for obj2 in oss2.ObjectIterator(bucket, prefix=str(obj1.key) + key2, delimiter='/'):
                        if str(obj2.key).count(
                                '/') == 4:  # pujin/robot-01/data/2023-12-20-02-16-56_obstacledetection_10/
                            bag_need = str(obj2.key).split('_')[-1][:-1]  # expected count parsed from dir name
                            count_bag = 0
                            for obj3 in oss2.ObjectIterator(bucket, prefix=str(obj2.key)):
                                if obj3.key[-3:] == 'bag':
                                    count_bag = count_bag + 1
                            if bag_need == str(count_bag):
                                upload_completed_prefix_list.append(obj2.key)
            # Process each completed directory.
            for prefix in upload_completed_prefix_list:
                local_delete_list = []  # local temp files to remove when done
                oss_delete_list = []    # raw OSS objects to delete after merging
                # Collect the bag objects to process.
                oss_bags = []
                local_bags = []
                for obj in oss2.ObjectIterator(bucket, prefix=prefix):
                    if str(obj.key).endswith('.bag'):
                        oss_bags.append(str(obj.key))
                # data/... -> data_merge/... ; derive output names from the key path.
                split_list = prefix.replace(key2, key3).split('/')
                merged_prefix = '/'.join(split_list[0:-2]) + '/'
                local_merged_dir = path1 + merged_prefix
                if not os.path.exists(local_merged_dir):
                    os.makedirs(local_merged_dir)
                merged_bag_name = split_list[-2]
                merged_bag_full_name = merged_bag_name + '.bag'
                local_bag_dir = path1 + '/'.join(split_list[0:-1]) + '/'
                if not os.path.exists(local_bag_dir):
                    os.makedirs(local_bag_dir)
                # Download every source bag.
                for oss_bag in oss_bags:
                    bag_name = str(oss_bag).split('/')[-1]
                    local_bag_path = local_bag_dir + bag_name
                    bucket.get_object_to_file(oss_bag, local_bag_path)
                    local_bags.append(local_bag_path)
                    local_delete_list.append(local_bag_path)
                    oss_delete_list.append(oss_bag)
                # 1 Merge the bags (merge() also uploads the derived artifacts).
                logging.info("合并文件,key前缀为: %s" % str(local_merged_dir))
                merged_bag_file_path, build_map_bag_local_path, jpg_file = merge(local_bags, merged_prefix,
                                                                                 local_merged_dir, merged_bag_full_name)
                local_delete_list.append(merged_bag_file_path)
                local_delete_list.append(build_map_bag_local_path)
                local_delete_list.append(jpg_file)
                # Remove local temporary files (best effort, per file).
                if len(local_delete_list) > 0:
                    for local_delete in local_delete_list:
                        try:
                            os.remove(local_delete)
                        except Exception as e:
                            logging.exception("删除本地临时文件报错: %s" % str(e))
                # Delete the raw OSS objects that were merged.
                if len(oss_delete_list) > 0:
                    bucket.batch_delete_objects(oss_delete_list)
            time.sleep(sleep_time)
        except Exception as e:
            logging.exception("全局错误处理: %s" % str(e))