# merge-pjibot_guide.py
# -*- coding: utf-8 -*-
import os
import time
import oss2
from rosbag import Bag, Compression
import subprocess
import logging
import rosbag
from std_msgs.msg import Header
import rospy

# Local working directory for downloads, merged bags and the log file.
path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot/'
logging.basicConfig(filename=path1 + 'log/merge-pjibot_guide.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')
# OSS key layout: pjibot/<robot>/data/... is raw input, data_merge/ holds the
# merged bag, data_parse/ holds derived artifacts.
key1 = 'pjibot/'
key2 = 'data/'
key3 = 'data_merge/'
key4 = 'data_parse/'
sleep_time = 30  # seconds between OSS scan rounds
compress_way = Compression.BZ2
  20. def merge(local_bags, merged_prefix, local_merged_dir, merged_bag_name):
  21. try:
  22. parse_prefix = merged_prefix.replace('data_merge', 'data_parse') + merged_bag_name.split('.')[0] + '/'
  23. output_bag_file = local_merged_dir + merged_bag_name
  24. with Bag(output_bag_file, 'w', compression=compress_way) as o:
  25. for i in range(len(local_bags)):
  26. with Bag(local_bags[i], 'r') as ib:
  27. for topic, msg, t in ib:
  28. o.write(topic, msg, t)
  29. bucket.put_object_from_file(merged_prefix + merged_bag_name, output_bag_file)
  30. # 获取只有1帧率的数据包
  31. output_bag_file2 = local_merged_dir + merged_bag_name + '.merge'
  32. tfstatic_key = merged_prefix.replace('data_merge/', '') + 'tfstatic.bag'
  33. local_tfstatic_path = local_merged_dir + 'tfstatic.bag'
  34. logging.info("tfstatic.bag包的key为: %s" % str(tfstatic_key))
  35. bucket.get_object_to_file(tfstatic_key, local_tfstatic_path)
  36. bag_a = rosbag.Bag(output_bag_file)
  37. bag_b = rosbag.Bag(local_merged_dir + 'tfstatic.bag')
  38. # compression_a = bag_a.get_compression()
  39. tf_msg = None
  40. for topic, msg, t in bag_b.read_messages(topics=['/tf_static']):
  41. tf_msg = msg
  42. if tf_msg:
  43. first_timestamp = bag_a.get_start_time()
  44. print(first_timestamp)
  45. header = Header()
  46. header.stamp = rospy.Time.from_sec(first_timestamp)
  47. for transform in tf_msg.transforms:
  48. transform.header.stamp = header.stamp
  49. with rosbag.Bag(output_bag_file2, 'w') as outbag:
  50. for topic, msg, t in bag_a.read_messages():
  51. outbag.write(topic, msg, t=rospy.Time.from_sec(t.to_sec()))
  52. outbag.write('/tf_static', tf_msg, t=header.stamp)
  53. bag_a.close()
  54. bag_b.close()
  55. build_map_key = parse_prefix + 'build_map.bag'
  56. bucket.put_object_from_file(build_map_key, output_bag_file2)
  57. # 根据合并后的包和map.bag 生成轨迹图片
  58. # 获取只有1帧率的数据包
  59. map_key = merged_prefix.replace('data_merge/', '') + 'map.bag'
  60. local_map_path = local_merged_dir + 'map.bag'
  61. logging.info("map.bag包的key为: %s" % str(map_key))
  62. bucket.get_object_to_file(map_key, local_map_path)
  63. jpg_file = local_merged_dir + merged_bag_name.split('.')[0] + '.jpg'
  64. # 构建命令
  65. command1 = ['rosrun', 'trajectory', 'demo_node', local_map_path, output_bag_file, jpg_file]
  66. # 记录命令
  67. logging.info("调用命令轨迹图片生成命令: %s" % ' '.join(command1))
  68. # 使用 subprocess.run 来执行命令,并捕获输出和错误
  69. result = subprocess.run(command1, capture_output=True, text=True)
  70. # 记录命令的标准输出
  71. if result.stdout:
  72. logging.info("命令输出: %s" % result.stdout.strip())
  73. # 记录命令的标准错误输出
  74. if result.stderr:
  75. logging.error("命令错误: %s" % result.stderr.strip())
  76. # 检查命令的返回码
  77. if result.returncode != 0:
  78. logging.error("命令执行失败,返回码: %d" % result.returncode)
  79. else:
  80. logging.info("命令执行成功")
  81. jpg_key = parse_prefix + 'track.png'
  82. bucket.put_object_from_file(jpg_key, jpg_file)
  83. costmap_key = merged_prefix.replace('data_merge/', '') + 'costmap.bag'
  84. logging.info("costmap.bag包的key为: %s" % str(costmap_key))
  85. local_costmap_path = local_merged_dir + 'costmap.bag'
  86. bucket.get_object_to_file(costmap_key, local_costmap_path)
  87. # 将三个单帧的包复制到指定采集数据目录一份
  88. bucket.put_object_from_file(parse_prefix + 'tfstatic.bag', local_tfstatic_path)
  89. bucket.put_object_from_file(parse_prefix + 'map.bag', local_map_path)
  90. bucket.put_object_from_file(parse_prefix + 'costmap.bag', local_costmap_path)
  91. return output_bag_file, output_bag_file2, jpg_file
  92. except Exception as e:
  93. logging.exception("bag包合并报错: %s" % str(e))
'''
SECURITY NOTE (review): live OSS access credentials are stored in this file in
plain text (here and in the oss2.Auth call below). They should be moved to an
environment variable or a credentials file outside version control, and the
exposed key pair should be rotated.

cname: http://pji-bucket1.oss.icvdc.com
internal endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
keyid: n8glvFGS25MrLY7j
secret: xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
OSS bucket name: pji-bucket1
OSS bucket name: oss://pji-bucket1
'''
  102. # ------- 获取未合并的bag包,合并 -------
  103. if __name__ == '__main__':
  104. # 1 创建阿里云对象
  105. auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
  106. # cname = 'http://open-bucket.oss.icvdc.com'
  107. # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
  108. endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
  109. bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
  110. while True:
  111. logging.info("开始新一轮扫描")
  112. try:
  113. # print(f'当前轮次{turn}扫描开始。')
  114. prefix_list = []
  115. # 2 获取已经上传完成的所有目录并分组
  116. upload_completed_prefix_list = []
  117. for obj1 in oss2.ObjectIterator(bucket, prefix=key1, delimiter='/'):
  118. bag_list = []
  119. target_path = ''
  120. if str(obj1.key).count('/') == 2: # pujin/robot-01/
  121. # 6 校验bag包个数
  122. for obj2 in oss2.ObjectIterator(bucket, prefix=str(obj1.key) + key2, delimiter='/'):
  123. if str(obj2.key).count(
  124. '/') == 4: # pujin/robot-01/data/2023-12-20-02-16-56_obstacledetection_10/
  125. bag_need = str(obj2.key).split('_')[-1][:-1]
  126. count_bag = 0
  127. for obj3 in oss2.ObjectIterator(bucket, prefix=str(obj2.key)):
  128. if obj3.key[-3:] == 'bag':
  129. count_bag = count_bag + 1
  130. if bag_need == str(count_bag):
  131. upload_completed_prefix_list.append(obj2.key)
  132. # 处理
  133. for prefix in upload_completed_prefix_list:
  134. local_delete_list = []
  135. oss_delete_list = []
  136. # 获取要处理的包
  137. oss_bags = []
  138. local_bags = []
  139. for obj in oss2.ObjectIterator(bucket, prefix=prefix):
  140. if str(obj.key).endswith('.bag'):
  141. oss_bags.append(str(obj.key))
  142. split_list = prefix.replace(key2, key3).split('/')
  143. merged_prefix = '/'.join(split_list[0:-2]) + '/'
  144. local_merged_dir = path1 + merged_prefix
  145. if not os.path.exists(local_merged_dir):
  146. os.makedirs(local_merged_dir)
  147. merged_bag_name = split_list[-2]
  148. merged_bag_full_name = merged_bag_name + '.bag'
  149. local_bag_dir = path1 + '/'.join(split_list[0:-1]) + '/'
  150. if not os.path.exists(local_bag_dir):
  151. os.makedirs(local_bag_dir)
  152. # 下载
  153. for oss_bag in oss_bags:
  154. bag_name = str(oss_bag).split('/')[-1]
  155. local_bag_path = local_bag_dir + bag_name
  156. bucket.get_object_to_file(oss_bag, local_bag_path)
  157. local_bags.append(local_bag_path)
  158. local_delete_list.append(local_bag_path)
  159. oss_delete_list.append(oss_bag)
  160. # 1 合并bag
  161. logging.info("合并文件,key前缀为: %s" % str(local_merged_dir))
  162. merged_bag_file_path, build_map_bag_local_path, jpg_file = merge(local_bags, merged_prefix,
  163. local_merged_dir, merged_bag_full_name)
  164. local_delete_list.append(merged_bag_file_path)
  165. local_delete_list.append(build_map_bag_local_path)
  166. local_delete_list.append(jpg_file)
  167. # 删除本地临时文件
  168. if len(local_delete_list) > 0:
  169. for local_delete in local_delete_list:
  170. try:
  171. os.remove(local_delete)
  172. except Exception as e:
  173. logging.exception("删除本地临时文件报错: %s" % str(e))
  174. # 删除oss原始文件
  175. if len(oss_delete_list) > 0:
  176. bucket.batch_delete_objects(oss_delete_list)
  177. time.sleep(sleep_time)
  178. except Exception as e:
  179. logging.exception("全局错误处理: %s" % str(e))