csv-pjibot_delivery.py

# -*- coding: utf-8 -*-
import os
import shutil
import subprocess
import time
import oss2
import json
from resource import bagtocsv_robot
import logging

path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
path2 = '/mnt/disk001/pdf_outdoor/run/'
path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv/'
logging.basicConfig(filename=path1 + 'log/csv-pjibot_delivery.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')
key1 = 'pjibot_delivery/'
sleep_time = 30  # how many seconds to wait between scans
error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv-errorBag.json"
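
# Overview: this Python 2 worker polls the OSS prefix 'pjibot_delivery/' for
# merged bags under data_merge/, downloads each bag, converts it into four csv
# files with bagtocsv_robot, renders a pdf report with the external
# ./pji_outdoor_real tool, uploads the results to the matching data_parse/
# prefix, and records bags that fail in csv-errorBag.json so they are skipped
# on later scans.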


def parse_json_to_string_array(file_path):
    try:
        # Read the JSON file. Python 2's open() does not take an encoding
        # argument, so read the raw bytes and decode them manually.
        with open(file_path, 'r') as file:
            file_content = file.read()
        # Parse the JSON content (the file is assumed to be UTF-8 encoded).
        data = json.loads(file_content.decode('utf-8'))
        # Make sure the data is a list and every element is a string.
        if isinstance(data, list):
            for item in data:
                # In Python 2 both str and unicode derive from basestring.
                if not isinstance(item, basestring):
                    raise ValueError("JSON array element is not a string")
            return data
        else:
            return []
    except Exception:
        return []


def list_to_json_file(data, file_path):
    """
    Serialize a list to JSON and write it to the given file path.
    If the file already exists it is overwritten.

    Args:
        data (list): the list to serialize to JSON.
        file_path (str): path of the file to write the JSON data to.
    """
    # Dump the list to a JSON string and encode it as UTF-8 before writing.
    json_data = json.dumps(data, ensure_ascii=False, indent=4)
    json_data_utf8 = json_data.encode('utf-8')
    # Open the file in write mode, overwriting any existing content.
    with open(file_path, 'w') as file:
        # Write the UTF-8 encoded JSON string to the file.
        file.write(json_data_utf8)


def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
    try:
        bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
        bagname = data_bag.split('/')[-1].split('.')[0]
        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # directory that ends up holding the four csv files
        csv_file_name1 = 'trajectory_pji'
        local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
        oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
        csv_file_name2 = 'ego_pji'
        local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
        oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
        csv_file_name3 = 'pos_pji'
        local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
        oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
        csv_file_name4 = 'objects_pji'
        local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
        oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
        bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
        # Generate the pdf report.
        try:
            # Change to the directory of the report generator.
            os.chdir(path2)
            # Build the command line.
            command1 = ['./pji_outdoor_real',
                        os.path.join(local_csv_dir, ''),  # note: the trailing '/' may not be required, depending on the program
                        os.path.join(local_csv_dir, ''),  # same as above
                        os.path.join(local_csv_dir, 'trajectory.png'),
                        bagname]
            # Log the command being run.
            logging.info("Running pdf report command: %s" % ' '.join(command1))
            # Run the command with subprocess.Popen.
            process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = process.communicate()  # wait for the command to finish
            # Log any output or errors.
            if stdout:
                logging.info("Command stdout:")
                logging.info(stdout.decode('utf-8'))  # decode bytes under Python 2
            if stderr:
                logging.error("Command stderr:")
                logging.error(stderr.decode('utf-8'))  # decode bytes under Python 2
            # Check whether the command succeeded.
            if process.returncode == 0:
                logging.info("Command succeeded")
            else:
                logging.error("Command failed, exit code: %s" % process.returncode)
        except OSError as e:
            # Raised when the chdir fails, the executable is missing, etc.
            logging.error("Error while running the command: %s" % e)
        oss_csv_object_key5 = parse_prefix + 'report.pdf'
        bucket.put_object_from_file(oss_csv_object_key5, str(local_csv_dir) + '/report.pdf')
        logging.info("pdf report generated and uploaded.")
        # Remember these local files so they get deleted later.
        local_delete_list.append(local_csv_file_path1)
        local_delete_list.append(local_csv_file_path2)
        local_delete_list.append(local_csv_file_path3)
        local_delete_list.append(local_csv_file_path4)
        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
    except Exception as e2:
        # On any failure, record this bag in the error list so it is skipped on later scans.
        error_bag_list = parse_json_to_string_array(error_bag_json)
        error_bag_list.append(parse_prefix)
        list_to_json_file(error_bag_list, error_bag_json)
        logging.exception("Failed to generate csv: %s", e2)


# ------- Download the merged bag and parse it into csv files -------
if __name__ == '__main__':
    # 1. Create the Aliyun OSS client.
    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
    while True:
        logging.info("Starting a new scan: %s" % key1)
        try:
            local_delete_list = []
            oss_delete_list = []
            prefix_list = []
            # 2. List everything that has finished uploading and group it by directory.
            for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
                if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'):  # bags under data_merge are waiting to be parsed
                    # Locate the merged bag.
                    merged_bag_object_key = str(obj1.key)
                    merged_bag_object_key_split = merged_bag_object_key.split('/')
                    merged_prefix = '/'.join(merged_bag_object_key_split[:-1])  # the data_merge directory
                    parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
                    parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse').replace('.bag', '/')  # the data_parse directory
                    csv1_done = False
                    csv2_done = False
                    csv3_done = False
                    csv4_done = False
                    pdf_done = False
                    # Check whether the data_parse directory already contains the parsed files.
                    for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
                        if '/trajectory_pji.csv' in str(obj2.key):
                            csv1_done = True
                        if '/ego_pji.csv' in str(obj2.key):
                            csv2_done = True
                        if '/pos_pji.csv' in str(obj2.key):
                            csv3_done = True
                        if '/objects_pji.csv' in str(obj2.key):
                            csv4_done = True
                        if '/report.pdf' in str(obj2.key):
                            pdf_done = True
                    if csv1_done and csv2_done and csv3_done and csv4_done and pdf_done:
                        continue
                    # Skip bags that previously failed to parse.
                    error_bag_list = parse_json_to_string_array(error_bag_json)
                    if parse_prefix_full in error_bag_list:
                        continue
                    logging.info("Start generating scene-reconstruction csv: %s" % str(obj1.key))
                    local_merged_bag_path = path3 + merged_bag_object_key
                    local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
                    local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
                    if not os.path.exists(local_merged_dir):
                        os.makedirs(local_merged_dir)
                    if not os.path.exists(local_parse_dir):
                        os.makedirs(local_parse_dir)
                    merged_bag_full_name = merged_bag_object_key_split[-1]
                    merged_bag_name = merged_bag_full_name.split('.')[0]
                    try:
                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
                    except Exception as e:
                        logging.exception("Failed to download the merged bag: %s" % str(e))
                    local_delete_list.append(local_merged_bag_path)
                    # 3. Parse the bag into csv files and generate the pdf report.
                    parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
            # Delete the local temporary files.
            if len(local_delete_list) > 0:
                for local_delete in local_delete_list:
                    try:
                        os.remove(local_delete)
                    except Exception as e:
                        logging.exception("Caught an exception: %s" % str(e))
        except Exception as e:
            logging.exception("Top-level error handler: %s" % str(e))
        time.sleep(sleep_time)
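
# Deployment note (assumption, not part of the original source): the script
# targets Python 2 (it relies on basestring and decodes subprocess output by
# hand) and is meant to run as a long-lived worker, for example:
#   nohup python2 csv-pjibot_delivery.py &
# It loops forever, rescanning the OSS prefix every sleep_time (30) seconds.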