LingxinMeng 6 months ago
parent
commit c195973cf4
1 file changed with 39 additions and 7 deletions

+ 39 - 7
src/python2/pjibot/csv-pjibot_guide.py

@@ -2,6 +2,7 @@
 import os
 import time
 import oss2
+import json
 
 from resource import bagtocsv_rebot
 
@@ -17,8 +18,44 @@ logging.basicConfig(filename=path1 + 'log/csv-pjibot_guide.log', level=logging.I
                     format='%(asctime)s - %(levelname)s - %(message)s')
 
 key1 = 'pjibot/'
-sleep_time = 2  # seconds between scans
-
+sleep_time = 30  # seconds between scans
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot/csv-errorBag.json"
+def parse_json_to_string_array(file_path):
+    try:
+        # Open and read the JSON file (Python 2's open() takes no encoding argument, so decode the bytes after reading)
+        with open(file_path, 'r') as file:
+            # Read the file content
+            file_content = file.read()
+            # Parse the JSON content (in Python 2, json.loads parses a string)
+            data = json.loads(file_content.decode('utf-8'))  # assume the file is UTF-8 encoded and decode manually
+
+        # Check that the data is a list and that every element is a string
+        if isinstance(data, list):
+            for item in data:
+                if not isinstance(item, basestring):  # Python 2 strings are str or unicode, so check against basestring
+                    raise ValueError("Element of the JSON array is not a string")
+            return data
+        else:
+            return []
+    except Exception:  # on any read/parse/validation error, fall back to an empty list
+        return []
+def list_to_json_file(data, file_path):
+    """
+    Serialize a list to JSON and write it to the given file path.
+    If the file already exists, it is overwritten.
+
+    Args:
+    data (list): the list to serialize to JSON.
+    file_path (str): the path of the file to write the JSON data to.
+    """
+    # Serialize the list to a JSON string, keeping non-ASCII characters unescaped
+    json_data = json.dumps(data, ensure_ascii=False, indent=4)
+    json_data_utf8 = json_data.encode('utf-8')  # encode as UTF-8
+
+    # Open the file in write mode, overwriting any existing file
+    with open(file_path, 'w') as file:
+        # Write the UTF-8 encoded JSON string to the file
+        file.write(json_data_utf8)
 
 def parse_csv(costmap_bag, data_bag, parse_prefix, local_parse_dir, local_delete_list):
     try:
@@ -78,8 +115,6 @@ def parse_csv(costmap_bag, data_bag, parse_prefix, local_parse_dir, local_delete
 if __name__ == '__main__':
     # 1. Create the Aliyun OSS auth and bucket objects
     auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    # cname = 'http://open-bucket.oss.icvdc.com'
-    # bucket = oss2.Bucket(auth, cname, 'open-bucket', is_cname=True)
     endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
     bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
@@ -93,7 +128,6 @@ if __name__ == '__main__':
             for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
                 # get the merged bag
                 merged_bag_object_key = str(obj1.key)
-                # print(f'判断1{merged_bag_object_key}')
                 if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'):
                     merged_bag_object_key_split = merged_bag_object_key.split('/')
                     merged_prefix = '/'.join(merged_bag_object_key_split[:-1])
@@ -113,10 +147,8 @@ if __name__ == '__main__':
                         if '/merged_obstacles.csv' in str(obj2.key):
                             csv3_done = True
                     if not callback_undone:  # no callback.json
-                        # logging.info("Data callback already completed, no processing needed: %s" % str(parse_prefix_full))
                         continue
                     if csv1_done and csv2_done and csv3_done:
-                        # logging.info("Scene reconstruction csv already exists: %s" % str(parse_prefix_full))
                         continue
 
                     logging.info("开始生成场景还原csv: %s" % str(obj1.key))