# csv-pjisuv.py
  1. # -*- coding: utf-8 -*-
  2. import os
  3. import time
  4. import oss2
  5. from resource import pos_orig_csv
  6. from resource import drive_csv
  7. import logging
# Working directory of this pipeline stage; the log file and downloaded bags live under it.
path1 = '/mnt/disk001/dcl_data_process/src/python2/pjisuv/'
logging.basicConfig(filename=path1 + 'log/csv.log', level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')
# OSS object-key prefix that the main loop scans.
key1 = 'pjisuv/'
sleep_time = 60  # seconds to wait between scan rounds
  13. def parse_to_csv(merged_bag_file_path, parse_prefix, local_parse_dir, local_delete_list):
  14. try:
  15. logging.info("开始解析 pos_orig.csv: %s" % str(merged_bag_file_path))
  16. pos_orig_csv.parse(merged_bag_file_path, local_parse_dir)
  17. logging.info("开始解析 drive.csv: %s" % str(merged_bag_file_path))
  18. local_csv_dir = drive_csv.parse(merged_bag_file_path, local_parse_dir)
  19. local_file_name1 = 'pos'
  20. local_file_name2 = 'drive'
  21. local_path1 = str(local_csv_dir) + '/' + local_file_name1 + '.csv'
  22. local_path2 = str(local_csv_dir) + '/' + local_file_name2 + '.csv'
  23. oss_file_name1 = 'pos_orig'
  24. oss_file_name2 = 'drive'
  25. # 删除本地文件
  26. local_delete_list.append(local_path1)
  27. local_delete_list.append(local_path2)
  28. oss_csv_object_key1 = parse_prefix + oss_file_name1 + '.csv'
  29. oss_csv_object_key2 = parse_prefix + oss_file_name2 + '.csv'
  30. bucket.put_object_from_file(oss_csv_object_key1, local_path1)
  31. logging.info("上传csv成功: %s" % str(oss_csv_object_key1))
  32. bucket.put_object_from_file(oss_csv_object_key2, local_path2)
  33. logging.info("上传csv成功: %s" % str(oss_csv_object_key2))
  34. return local_parse_dir
  35. except Exception as e:
  36. logging.exception("生成csv报错: %s" % str(e))
# SECURITY(review): real OSS credentials are committed in this source file (both
# here and in the __main__ block below). Move them to environment variables or a
# secrets manager and rotate the keys.
'''
cname:http://open-bucket.oss.icvdc.com
内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
oss桶名: open-bucket
keyid:n8glvFGS25MrLY7j
secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
'''
# ------- fetch merged bag archives from OSS and parse CSVs out of them -------
  45. if __name__ == '__main__':
  46. # 1 创建阿里云对象
  47. auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
  48. # cname = 'http://open-bucket.oss.icvdc.com'
  49. # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
  50. endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
  51. bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
  52. while True:
  53. try:
  54. logging.info("开始新一轮扫描")
  55. prefix_list = []
  56. # 2 获取已经上传完成的所有目录并分组
  57. for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
  58. local_delete_list = []
  59. oss_delete_list = []
  60. try:
  61. # 获取合并后的包
  62. merged_bag_object_key = str(obj1.key)
  63. # print(f'判断1{merged_bag_object_key}')
  64. if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'):
  65. # print(f'需要解析{merged_bag_object_key}')
  66. merged_bag_object_key_split = merged_bag_object_key.split('/')
  67. merged_prefix = '/'.join(merged_bag_object_key_split[:-1])
  68. parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
  69. parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'
  70. csv1_done = False
  71. # csv2_done = False
  72. csv3_done = False
  73. for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
  74. if '/pos_orig.csv' in str(obj2.key):
  75. csv1_done = True
  76. # if '/pos_hmi.csv' in str(obj2.key):
  77. # csv2_done = True
  78. if '/drive.csv' in str(obj2.key):
  79. csv3_done = True
  80. if csv1_done and csv3_done:
  81. # if csv1_done and csv2_done and csv3_done:
  82. continue
  83. logging.info("需要解析: %s" % merged_bag_object_key)
  84. local_merged_bag_path = path1 + 'csv/' + merged_bag_object_key
  85. local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
  86. local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
  87. if not os.path.exists(local_merged_dir):
  88. os.makedirs(local_merged_dir)
  89. if not os.path.exists(local_parse_dir):
  90. os.makedirs(local_parse_dir)
  91. merged_bag_full_name = merged_bag_object_key_split[-1]
  92. merged_bag_name = merged_bag_full_name.split('.')[0]
  93. logging.info("开始下载合并后的 bag 包: %s" % merged_bag_object_key)
  94. bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
  95. logging.info("下载合并后的 bag 包完成: %s" % merged_bag_object_key)
  96. local_delete_list.append(local_merged_bag_path)
  97. # 2 生成 pos_orig.csv 和 pos_hmi.csv
  98. logging.info("生成 pos_orig.csv 和 pos_hmi.csv: %s" % parse_prefix_full)
  99. parse_to_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
  100. except Exception as e:
  101. logging.exception("局部异常处理: %s", str(e))
  102. # 删除本地临时文件
  103. if len(local_delete_list) > 0:
  104. for local_delete in local_delete_list:
  105. try:
  106. os.remove(local_delete)
  107. except Exception as e:
  108. logging.exception("删除本地临时文件: %s" % str(e))
  109. time.sleep(sleep_time)
  110. except Exception as e:
  111. logging.exception("全局异常处理: %s", str(e))