# -*- coding: utf-8 -*-
"""Poll an OSS bucket for merged rosbag files and parse them into CSVs.

Every ``sleep_time`` seconds the script scans the ``kinglong/`` prefix for
``data_merge`` ``.bag`` objects.  For each bag whose corresponding
``data_parse`` CSVs (pos_orig / pos_hmi / drive) are missing, it downloads
the bag, runs the three CSV parsers locally, uploads the results, and
deletes the local temporary files.
"""
import os
import time

import oss2
import xml.etree.ElementTree as ET
from rosbag import Bag, Compression
import shutil
import docker

import bagtocsv_orig
import bagtocsv_hmi
import get_drive
import parse_jinlong_image
import pcdtovideo_jilong_overlook
import pcdtovideo_jilong_forwardlook

key1 = 'kinglong/'   # OSS prefix to scan for merged bags
path1 = '/root/'     # root of the local working directory
sleep_time = 5       # seconds between bucket scans


def parse_to_csv(merged_bag_file_path, parse_prefix, local_parse_dir, local_delete_list):
    """Generate pos_orig/pos_hmi/drive CSVs from a merged bag and upload them.

    Args:
        merged_bag_file_path: local path of the downloaded merged ``.bag``.
        parse_prefix: OSS key prefix (ending in ``/``) to upload the CSVs under.
        local_parse_dir: local directory the parsers write into.
        local_delete_list: mutated in place — the generated CSV paths are
            appended so the caller can remove them later.

    Returns:
        ``local_parse_dir`` on success, ``None`` if any step raised
        (best-effort: the error is printed and processing continues).
    """
    try:
        # All three parsers write into local_parse_dir; each returns its
        # output directory, and only the last assignment is used below
        # (kept as in the original — presumably all three agree; confirm).
        local_csv_dir = bagtocsv_orig.parse(merged_bag_file_path, local_parse_dir)
        local_csv_dir = bagtocsv_hmi.parse(merged_bag_file_path, local_parse_dir)
        local_csv_dir = get_drive.parse(merged_bag_file_path, local_parse_dir)

        # Upload each CSV and remember its local path for later cleanup.
        for csv_file_name in ('pos_orig', 'pos_hmi', 'drive'):
            local_csv_path = local_csv_dir + '/' + csv_file_name + '.csv'
            local_delete_list.append(local_csv_path)
            oss_csv_object_key = parse_prefix + csv_file_name + '.csv'
            bucket.put_object_from_file(oss_csv_object_key, local_csv_path)
        return local_parse_dir
    except Exception as e:
        print("生成csv报错: %s" % str(e))


'''
cname: http://open-bucket.oss.icvdc.com
internal endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
oss bucket: open-bucket
keyid: n8glvFGS25MrLY7j
secret: xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
'''

# ------- Fetch merged bag files and parse CSVs out of them -------
if __name__ == '__main__':
    # 1. Create the Aliyun OSS client.
    # SECURITY: credentials are hardcoded in source — move them to
    # environment variables or a secrets store.
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    # cname = 'http://open-bucket.oss.icvdc.com'
    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')

    while True:
        local_delete_list = []

        # 2. Walk every object under the scan prefix, looking for merged bags.
        for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
            merged_bag_object_key = str(obj1.key)
            if 'data_merge' not in merged_bag_object_key or not merged_bag_object_key.endswith('.bag'):
                continue

            merged_bag_object_key_split = merged_bag_object_key.split('/')
            # data_parse prefix mirrors the bag's key, minus the '.bag' suffix.
            parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'

            # Skip bags whose three CSVs already exist in the bucket.
            csv1_done = False
            csv2_done = False
            csv3_done = False
            for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
                if '/pos_orig.csv' in str(obj2.key):
                    csv1_done = True
                if '/pos_hmi.csv' in str(obj2.key):
                    csv2_done = True
                if '/drive.csv' in str(obj2.key):
                    csv3_done = True
            if csv1_done and csv2_done and csv3_done:
                continue

            print("需要解析: %s" % merged_bag_object_key)

            # Mirror the OSS layout locally and make sure both directories exist.
            local_merged_bag_path = path1 + 'csv/' + merged_bag_object_key
            local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
            local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
            if not os.path.exists(local_merged_dir):
                os.makedirs(local_merged_dir)
            if not os.path.exists(local_parse_dir):
                os.makedirs(local_parse_dir)

            # Download the bag, then generate and upload the CSVs.
            bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
            local_delete_list.append(local_merged_bag_path)
            parse_to_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)

        # Remove local temporary files created during this scan.
        for local_delete in local_delete_list:
            try:
                os.remove(local_delete)
            except Exception as e:
                print("删除本地临时文件: %s" % str(e))

        time.sleep(sleep_time)