LingxinMeng hai 6 meses
pai
achega
e8bf5cb844
Modificáronse 44 ficheiros con 1282 adicións e 1131 borrados
  1. 1 0
      .gitignore
  2. 2 1
      src/python2/pjibot/camera-errorBag.json
  3. 70 68
      src/python2/pjibot/csv-errorBag.json
  4. 91 99
      src/python2/pjibot/csv-pjibot_guide.py
  5. 161 133
      src/python2/pjibot/resource/bagtocsv_robot.py
  6. 0 85
      src/python2/pjibot/resource/create_video_from_pcd_bak.py
  7. 0 0
      src/python2/pjibot/utils/__init__.py
  8. 45 0
      src/python2/pjibot/utils/json_utils.py
  9. 1 0
      src/python2/pjibot_delivery/2callback-nohup.sh
  10. 23 21
      src/python2/pjibot_delivery/2callback-pjibot_delivery.py
  11. 11 0
      src/python2/pjibot_delivery/2csv-errorBag.json
  12. 1 0
      src/python2/pjibot_delivery/2csv-nohup.sh
  13. 100 66
      src/python2/pjibot_delivery/2csv-pjibot_delivery.py
  14. 1 0
      src/python2/pjibot_delivery/2pcd-nohup.sh
  15. 21 18
      src/python2/pjibot_delivery/2pcd-pjibot_delivery.py
  16. 0 0
      src/python2/pjibot_delivery/2xosc-errorBag.json
  17. 2 1
      src/python2/pjibot_delivery/2xosc-nohup.sh
  18. 61 21
      src/python2/pjibot_delivery/2xosc-pjibot_delivery.py
  19. 21 14
      src/python2/pjibot_delivery/callback-pjibot_delivery.py
  20. 2 2
      src/python2/pjibot_delivery/camera-pjibot_delivery.py
  21. 139 1
      src/python2/pjibot_delivery/csv-errorBag.json
  22. 1 1
      src/python2/pjibot_delivery/csv-nohup.sh
  23. 94 106
      src/python2/pjibot_delivery/csv-pjibot_delivery.py
  24. 12 77
      src/python2/pjibot_delivery/pcd-pjibot_delivery.py
  25. 34 26
      src/python2/pjibot_delivery/resource/bagtocsv_robot.py
  26. 2 3
      src/python2/pjibot_delivery/simulation-pjibot_delivery.py
  27. 0 0
      src/python2/pjibot_delivery/utils/__init__.py
  28. 44 0
      src/python2/pjibot_delivery/utils/json_utils.py
  29. 51 95
      src/python2/pjibot_delivery/xosc-pjibot_delivery.py
  30. 21 29
      src/python2/pjibot_patrol/callback-pjibot_patrol.py
  31. 0 8
      src/python2/pjibot_patrol/camera-nohup.sh
  32. 0 94
      src/python2/pjibot_patrol/camera-pjibot_patrol.py
  33. 98 63
      src/python2/pjibot_patrol/csv-pjibot_patrol.py
  34. 21 26
      src/python2/pjibot_patrol/pcd-pjibot_patrol.py
  35. 34 26
      src/python2/pjibot_patrol/resource/bagtocsv_robot.py
  36. 4 8
      src/python2/pjibot_patrol/simulation-pjibot_patrol.py
  37. 0 0
      src/python2/pjibot_patrol/utils/__init__.py
  38. 44 0
      src/python2/pjibot_patrol/utils/json_utils.py
  39. 65 31
      src/python2/pjibot_patrol/xosc-pjibot_patrol.py
  40. 0 3
      src/python2/pjisuv/2simulation-errorBag.json
  41. 1 2
      src/python2/pjisuv/xosc-errorBag.json
  42. 1 1
      src/python2/pjisuv/xosc-pjisuv.py
  43. BIN=BIN
      src/python3/pjibot_outdoor/a.out
  44. 2 2
      src/python3/pjibot_outdoor/jiqiren_outdoor.py

+ 1 - 0
.gitignore

@@ -7,6 +7,7 @@ src/python2/pjibot_delivery/csv
 src/python2/pjibot_delivery/pcd
 src/python2/pjibot_delivery/log
 src/python2/pjibot_delivery/pjibot_delivery
+src/python2/pjibot_delivery/2csv
 src/python2/pjibot_patrol/camera
 src/python2/pjibot_patrol/csv
 src/python2/pjibot_patrol/log

+ 2 - 1
src/python2/pjibot/camera-errorBag.json

@@ -60,5 +60,6 @@
     "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-40-28/", 
     "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-45-29/", 
     "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-09-46-26_obstacledetection_42/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-11-18-30_obstacledetection_30/"
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-11-18-30_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-12-05-07-17-30_obstacledetection_60/"
 ]

+ 70 - 68
src/python2/pjibot/csv-errorBag.json

@@ -1,84 +1,86 @@
 [
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/2024-09-04-10-54-22_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-07-09-14-21-33_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-07-09-14-21-48_obstacledetection_67/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-05-21-49-38_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-06-00-38-07_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-07-07-16-11_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-08-39-25_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-08-57-58_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-09-55-16_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-05-05_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-10-48-35_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-18-07_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-18-47_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-32-18_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-39-28_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-52-13_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-54-16_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-19-33_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-30-47_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-41-00_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-46-11_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-04-24_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-05-05_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-16-13_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-17-07_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-44-54_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-14-06-34_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-14-07-41_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-06-36_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-22-44_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-34-05_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-46-25_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-04-39_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-35-53_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-45-18_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-21-22-14_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-21-30-22_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-04-02-26-31_obstacledetection_38/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-01-17_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-34-05_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-10-28-16-09-39_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-40-28/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-11-48_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-43-02/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/2024-09-04-10-54-22_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-06-00-38-07_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-31-44_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-18-47_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-26-32_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-28-41_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-30-14_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-41-00_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-16-13_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-46-25_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-00-07_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-01-17_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-06-18_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-06-46_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-25-14_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-31-44_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-09-55-16_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-28-41_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-35-53_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-07-09-53-10_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-17-07_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-15-10-36-14_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-32-26/", 
+    "pjibot/pjibot-P1YNYD1M227000116/data_parse/2024-11-07-16-39-17_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-32-18_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-06-36_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-59-29_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-19-05_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-48-28_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-52-30_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-01-17-06_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-01-19-13_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-11-18-30_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-19-33_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-30-25/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-08-57-58_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-11-39-55/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-21-30-22_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-44-54_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-13-04-24_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-18-12-00-09_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-10-28-16-09-39_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000116/data_parse/2024-10-29-09-00-48_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M227000116/data_parse/2024-11-07-16-39-17_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M228000127/data_parse/2024-07-09-09-53-21_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-04-02-26-31_obstacledetection_38/", 
-    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-07-09-31-48_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-07-09-53-10_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-14-07-41_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-18-07_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-34-00/", 
+    "pjibot/pjibot-P1YNYD1M229000131/data_parse/2024-11-11-02-45-56_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-07-09-14-21-48_obstacledetection_67/", 
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-09-46-26_obstacledetection_42/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-01-19-13_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-08-13-16-01-31_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-12-03-18-13-53_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-10-10-02-45-27_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-14-06-34_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-05-21-49-38_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-01-17-06_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-12-05-07-17-30_obstacledetection_60/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-08-39-25_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-06-18_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-08-07-07-16-11_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-25-14_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-15-10-53-53_obstacledetection_44/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-39-28_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-46-11_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-10-27-21-12-31_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M229000131/data_parse/2024-11-11-02-40-36_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M229000131/data_parse/2024-11-11-02-45-56_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-23-06-46_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-45-18_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-12-30-47_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-54-16_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-20-04-39_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000129/data_parse/2024-07-07-09-31-48_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-48-28_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-12-09-44-23_obstacledetection_29/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-14-17-59-21_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-15-10-36-14_obstacledetection_30/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-15-10-53-53_obstacledetection_44/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-11-39-55/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-31-10/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-32-26/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-34-00/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-43-02/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-19-22-44_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-26-32_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-45-29/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-19-05_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-03-11-18-47_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M228000127/data_parse/2024-07-09-09-53-21_obstacledetection_30/", 
     "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-27-25/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-30-25/", 
     "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-47-59/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-40-28/", 
-    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-45-29/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-09-46-26_obstacledetection_42/", 
-    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-29-11-18-30_obstacledetection_30/"
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-05-22-30-14_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M223000101/data_parse/2024-11-14-17-59-21_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-07-09-14-21-33_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M229000131/data_parse/2024-11-11-02-40-36_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M227000116/data_parse/2024-10-29-09-00-48_obstacledetection_30/", 
+    "pjibot/pjibot-P1YNYD1M225000112/data_parse/TS100M36-BJ-dock-2024-09-29-15-31-10/", 
+    "pjibot/pjibot-P1YNYD1M227000115/data_parse/2024-09-06-00-52-30_obstacledetection_30/"
 ]

+ 91 - 99
src/python2/pjibot/csv-pjibot_guide.py

@@ -1,138 +1,135 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 - 开始 ------- 
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
 import os
 import time
 import oss2
 import json
 import subprocess
-
-from resource import bagtocsv_rebot
-
 import logging
-
+import io
+from resource import bagtocsv_robot
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot/'
 path2 = '/mnt/disk001/run' # 生成评价报告pdf的可执行文件路径
 path3 = '/mnt/disk001/dcl_data_process/src/python3/pjibot_indoor'
 path4 = '/mnt/disk001/dcl_data_process/src/python3/pjibot_indoor/filter_pos.py'
 pgm_path = '/mnt/disk001/dcl_data_process/src/python3/pjibot_indoor/resource/shiyanshi_newpgm_20240416.pgm'
-
-logging.basicConfig(filename=path1 + 'log/csv-pjibot_guide.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+log_file_path = path1 + 'log/csv-pjibot_guide.log'
 key1 = 'pjibot/'
 sleep_time = 10  # 每多少秒扫描一次
 error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot/csv-errorBag.json"
-def parse_json_to_string_array(file_path):
-    try:
-        # 打开并读取JSON文件(Python 2中不支持encoding参数,需要使用codecs模块或处理文件读取后的编码)
-        with open(file_path, 'r') as file:
-            # 读取文件内容
-            file_content = file.read()
-            # 解析JSON内容(Python 2中json.loads用于解析字符串)
-            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
-
-        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
-        if isinstance(data, list):
-            for item in data:
-                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
-                    raise ValueError("JSON数组中的元素不是字符串")
-            return data
-        else:
-            return []
-    except Exception as e:
-        return []
-def list_to_json_file(data, file_path):
-    """
-    将列表转换为JSON格式并写入指定的文件路径。
-    如果文件已存在,则覆盖它。
+logging.basicConfig(filename=log_file_path, level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
+# ------- 全局变量 - 结束 -------
 
-    参数:
-    data (list): 要转换为JSON的列表。
-    file_path (str): 要写入JSON数据的文件路径。
-    """
-    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
-    json_data = json.dumps(data, ensure_ascii=False, indent=4)
-    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
-
-    # 以写入模式打开文件,如果文件已存在则覆盖
-    with open(file_path, 'w') as file:
-        # 将UTF-8编码的JSON字符串写入文件
-        file.write(json_data_utf8)
 
+# ------- 解析 csv 主要函数,调用 csv 解析脚本 -------
 def parse_csv(costmap_bag, data_bag, parse_prefix, local_parse_dir, local_delete_list):
     try:
-        bagtocsv_rebot.parse(costmap_bag, data_bag, local_parse_dir + '/csv/')
+        bagtocsv_robot.parse(costmap_bag, data_bag, local_parse_dir + '/csv/')
         bagname = data_bag.split('/')[-1].split('.')[0]
         local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)
+        
+        # ------- 上传 csv - 开始 -------
         csv_file_name1 = 'pos_pji'
         local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
-        local_delete_list.append(local_csv_file_path1)
         oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        if os.path.exists(local_csv_file_path1):
+            bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+            local_delete_list.append(local_csv_file_path1)
+        else:
+            logging.error("没有 pos_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name2 = 'EgoState_pji'
         local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
-        local_delete_list.append(local_csv_file_path2)
         oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
-
-        # 下载 track.png
+        if os.path.exists(local_csv_file_path2):
+            bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+            local_delete_list.append(local_csv_file_path2)
+        else:
+            logging.error("没有 EgoState_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        
+        csv_file_name3 = 'merged_obstacles'
+        local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
+        oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
+        if os.path.exists(local_csv_file_path3):
+            bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+            local_delete_list.append(local_csv_file_path3)
+        else:
+            logging.error("没有 merged_obstacles.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        # ------- 上传 csv - 结束 -------
+        
+        # ------- 处理 output.json - 开始 -------
+        try:
+            # 1 解析 output.json
+            output_json_path = str(local_csv_dir)+'/output.json'
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                callback_json_oss_key = parse_prefix+'callback.json'
+                callback_json_local = local_csv_dir+'/callback.json'
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                data['check'] = list(set(data['check']))
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            # todo 可能没有callback.json,已经处理成 callback_done.json了,暂时不管
+            pass
+        # ------- 处理 output.json - 结束 -------
+        
+
+        # ------- 生成 pdf 报告 - 开始 -------
+        pdf_local_path = str(local_csv_dir) + '/report.pdf'
         track_png_key = parse_prefix + 'track.png'
         bucket.get_object_to_file(track_png_key, str(local_csv_dir) + '/track1.png')
-        # 生成pdf 报告
         os.chdir(path2)
-        command1 = './pji_single ' + str(local_csv_dir) + '/ ' + str(local_csv_dir) + '/ ' + str(
-            local_csv_dir) + '/track1.png ' + data_bag.split('/')[-1].split('.')[0]
+        command1 = './pji_single ' + str(local_csv_dir) + '/ ' + str(local_csv_dir) + '/ ' + str(local_csv_dir) + '/track1.png ' + data_bag.split('/')[-1].split('.')[0]
         logging.info("调用生成pdf 报告命令: %s" % str(command1))
         os.system(command1)
-        bucket.put_object_from_file(parse_prefix + 'report.pdf',
-                                    str(local_csv_dir) + '/report.pdf')
-        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
-
-        # ---------------------------------------------------
-        csv_file_name3 = 'merged_obstacles'
-        local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
-        oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        if os.path.exists(pdf_local_path):
+            bucket.put_object_from_file(parse_prefix + 'report.pdf',pdf_local_path)
+            local_delete_list.append(pdf_local_path)
+        else:
+            logging.error("没有 report.pdf")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        # ------- 生成 pdf 报告 - 结束 -------
+        
 
-        # 根据 merged_obstacles 和 机器人指定的pgm文件 生成新的 merged_obstacles 文件
+        # ------- 根据 merged_obstacles 和 机器人指定的pgm文件 生成新的 merged_obstacles 文件 -------
         os.chdir(path3)
-
-        # 构建命令
         command2 = "python2 {} {} {}".format(path4, pgm_path, local_csv_file_path3)
         logging.info("调用命令 merged_obstacles_new.csv 生成命令: %s", command2)
-
-        try:
-            # 使用 Popen 执行命令并捕获输出
-            process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            stdout, stderr = process.communicate()  # 获取标准输出和错误输出
-            if process.returncode == 0:
-                logging.info("命令输出: %s", stdout)
-            else:
-                logging.error("命令执行失败,错误信息: %s", stderr)
-        except Exception as e:
-            logging.error("执行命令时出现异常: %s", str(e))
+        process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()  # 获取标准输出和错误输出
+        if process.returncode == 0:
+            logging.info("命令输出: %s", stdout)
+        else:
+            logging.error("命令执行失败,错误信息: %s", stderr)
         time.sleep(2)
-        bucket.put_object_from_file(oss_csv_object_key3.replace('merged_obstacles', 'merged_obstacles_new'),
-                                    local_csv_file_path3)  # 因为生成的文件是同名覆盖的
-        # todo 暂时不删除 排查问题
-        local_delete_list.append(local_csv_file_path3)
-
-        # 这里不要删除了,不然无法复现pdf
-        # bucket.delete_object(track_png_key)
+        bucket.put_object_from_file(oss_csv_object_key3.replace('merged_obstacles', 'merged_obstacles_new'),local_csv_file_path3)  # 因为生成的文件是同名覆盖的
     except Exception as e2:
-        error_bag_list = parse_json_to_string_array(error_bag_json)
-        error_bag_list.append(parse_prefix)
-        list_to_json_file(error_bag_list, error_bag_json)
-        # 当出现异常时执行的代码
         logging.exception("生成csv报错: %s", e2)
+        json_utils.add_error(parse_prefix,error_bag_json)
 
 
 # ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         logging.info("开始新一轮扫描:%s " % key1)
         time.sleep(sleep_time)
@@ -164,7 +161,7 @@ if __name__ == '__main__':
                             pdf_done = True
                     if csv1_done and csv2_done and csv3_done and pdf_done:
                         continue
-                    error_bag_list = parse_json_to_string_array(error_bag_json)
+                    error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
                     if parse_prefix_full in error_bag_list:
                         continue
                     logging.info("开始解析csv并生成评价报告: %s" % str(obj1.key))
@@ -177,19 +174,14 @@ if __name__ == '__main__':
                         os.makedirs(local_parse_dir)
                     merged_bag_full_name = merged_bag_object_key_split[-1]
                     merged_bag_name = merged_bag_full_name.split('.')[0]
-                    try:
-                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
-                    except Exception as e:
-                        logging.exception("下载合并后的bag包失败: %s" % str(e))
-
+                    bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
                     local_delete_list.append(local_merged_bag_path)
                     # 下载costmap.bag
                     costmap_key = merged_prefix.replace('data_merge', '') + 'costmap.bag'
                     logging.info("costmap.bag包的key为: %s" % str(costmap_key))
                     bucket.get_object_to_file(costmap_key, local_merged_dir + 'costmap.bag')
                     # 2 生成 pos_orig.csv 和 pos_hmi.csv
-                    parse_csv(local_merged_dir + 'costmap.bag', local_merged_bag_path, parse_prefix_full,
-                              local_parse_dir, local_delete_list)
+                    parse_csv(local_merged_dir + 'costmap.bag', local_merged_bag_path, parse_prefix_full,local_parse_dir, local_delete_list)
 
             # 删除本地临时文件
             if len(local_delete_list) > 0:

+ 161 - 133
src/python2/pjibot/resource/bagtocsv_rebot.py → src/python2/pjibot/resource/bagtocsv_robot.py

@@ -1,9 +1,9 @@
 # coding: utf-8
-# !/usr/bin/env python2
+#!/usr/bin/env python2
 import os
 import rosbag
 import csv
-import math
+import math 
 import rospy
 import sys
 import time
@@ -11,8 +11,8 @@ import numpy as np
 from datetime import datetime
 import argparse
 import pandas as pd
-
-# from nav_msgs.msg import OccupancyGrid
+import json
+#from nav_msgs.msg import OccupancyGrid
 
 
 global_height, global_costmap, global_origin, global_resolution = None, None, None, None
@@ -92,7 +92,6 @@ def process_local_rosbag(bagfile, local_topic):
 
     return df
 
-
 def merge_obstacles(df, global_height):
     df.sort_values(by=['frame_time', 'x', 'y'], inplace=True)
 
@@ -133,72 +132,75 @@ def merge_obstacles(df, global_height):
     return result_df
 
 
-def parsezihao(input_dir, output_dir):
-    dic_EgoState = ['Time', 'simTime', 'simFrame', 'posX', 'posY', 'posZ', 'speedX', 'speedY', 'speedZ', 'accelX',
-                    'accelY', 'accelZ',
-                    'dimX', 'dimY', 'dimZ', 'obstacle', 'traveledDist']
-    # dic_DriverCtrl=['Time','simTime','simFrame','tarspeedX','tarspeedY','tarspeedZ','tardimX','tardimY','tardimZ']
-    EgoState_file = open(output_dir + "/" + "EgoState_pji.csv", 'w')
-    # DriverCtrl_file = open(output_dir + "/"+"DriverCtrl_pji.csv", 'w')
+
+
+def parsezihao(input_dir, output_dir):   
+    dic_EgoState = ['Time','simTime','simFrame','posX','posY','posZ','speedX','speedY','speedZ','accelX','accelY','accelZ',
+                            'dimX','dimY','dimZ','obstacle','traveledDist']
+    #dic_DriverCtrl=['Time','simTime','simFrame','tarspeedX','tarspeedY','tarspeedZ','tardimX','tardimY','tardimZ']
+    EgoState_file = open(output_dir + "/"+"EgoState_pji.csv", 'w')
+    #DriverCtrl_file = open(output_dir + "/"+"DriverCtrl_pji.csv", 'w')
     writer_EgoState = csv.writer(EgoState_file)
     writer_EgoState.writerow(dic_EgoState)
-    # writer_DriverCtrl = csv.writer(DriverCtrl_file)
-    # writer_DriverCtrl.writerow(dic_DriverCtrl)
+    #writer_DriverCtrl = csv.writer(DriverCtrl_file)
+    #writer_DriverCtrl.writerow(dic_DriverCtrl)
 
-    frame_max = sys.maxsize
-    count = 1
-    with rosbag.Bag(input_dir, 'r') as bag:
-        odom_flag = False
-        first_message_time = None
-        Frame_imu = 1
-        Frame_cmd_vel = 1
-        obstacle_state = 0
-        cur_mileage = ''
-
-        for topic, msg, t in bag.read_messages():  # t代表时间
 
-            if first_message_time is None:
+    frame_max=sys.maxsize
+    count=1
+    with rosbag.Bag(input_dir ,'r') as bag:
+        odom_flag=False
+        first_message_time = None
+        Frame_imu=1
+        Frame_cmd_vel=1
+        obstacle_state=0
+        cur_mileage=''
+        
+
+        for topic,msg,t in bag.read_messages(topics=['/obstacle_detection','sys_info','odom','imu']):    #t代表时间
+            
+            if first_message_time is None:  
                 first_message_time = t
                 first_message_time = rospy.Time.to_sec(first_message_time)
                 first_message_time = datetime.fromtimestamp(first_message_time)
-
+            
             if topic == "/obstacle_detection":
-                obstacle_state = msg.data
-                # print(msg.data)
-
+                obstacle_state=msg.data
+                #print(msg.data)
+                
             if topic == "/sys_info":
-                cur_mileage = msg.cur_mileage
-
-            if topic == "/odom":
-                odom_flag = True
-                posX = msg.pose.pose.position.x
-                posY = msg.pose.pose.position.y
-                posZ = msg.pose.pose.position.z
-                speedX = msg.twist.twist.linear.x * 3.6
-                speedY = msg.twist.twist.linear.y * 3.6
-                speedZ = msg.twist.twist.linear.z * 3.6
-                dimX = msg.twist.twist.angular.x
-                dimY = msg.twist.twist.angular.y
-                dimZ = msg.twist.twist.angular.z
-
+                cur_mileage=msg.cur_mileage
+          
+
+            if topic == "/odom": 
+                odom_flag=True 
+                posX=msg.pose.pose.position.x
+                posY=msg.pose.pose.position.y
+                posZ=msg.pose.pose.position.z
+                speedX=msg.twist.twist.linear.x*3.6
+                speedY=msg.twist.twist.linear.y*3.6
+                speedZ=msg.twist.twist.linear.z*3.6
+                dimX=msg.twist.twist.angular.x
+                dimY=msg.twist.twist.angular.y
+                dimZ=msg.twist.twist.angular.z
+                
             if topic == "/imu":
-
+               
                 if odom_flag:
-                    accelX = msg.linear_acceleration.x
-                    accelY = msg.linear_acceleration.y
-                    accelZ = msg.linear_acceleration.z
+                    accelX=msg.linear_acceleration.x
+                    accelY=msg.linear_acceleration.y
+                    accelZ=msg.linear_acceleration.z
                     timestamp = rospy.Time.to_sec(t)
                     date_time_imu = datetime.fromtimestamp(timestamp)
-                    simtime_imu = (date_time_imu - first_message_time).total_seconds()
-
-                    message_EgoState = [date_time_imu, simtime_imu, Frame_imu, posX, posY, posZ, speedX, speedY, speedZ,
-                                        accelX, accelY,
-                                        accelZ, dimX, dimY, dimZ, obstacle_state, cur_mileage]
+                    simtime_imu=(date_time_imu-first_message_time).total_seconds()
 
+                    message_EgoState =[date_time_imu,simtime_imu,Frame_imu,posX,posY,posZ,speedX,speedY,speedZ,accelX,accelY,
+                              accelZ,dimX,dimY,dimZ,obstacle_state,cur_mileage]
+                 
                     writer_EgoState.writerow(message_EgoState)
-                    Frame_imu += 1
+                    Frame_imu+=1
                 else:
-                    print('6666')
+                    print('6666')               
             '''        
             if topic =='/cmd_vel':
                 timestamp = rospy.Time.to_sec(t)
@@ -210,109 +212,130 @@ def parsezihao(input_dir, output_dir):
                                      
                 writer_DriverCtrl.writerow(message_DriverCtrl)
                 Frame_cmd_vel+=1
-            '''
+            '''           
 
         EgoState_file.close()
-        # DriverCtrl_file.close()
+        #DriverCtrl_file.close()
 
 
-def parsehancheng(input_dir, output_dir):
+def parsehancheng(input_dir, output_dir):   
     def quaternion_to_euler(x, y, z, w):
         # 将四元数归一化
         try:
-            length = np.sqrt(x ** 2 + y ** 2 + z ** 2 + w ** 2)
+            length = np.sqrt(x**2 + y**2 + z**2 + w**2)
             x /= length
             y /= length
             z /= length
             w /= length
-
+        
             # 计算欧拉角
-            # roll = np.arctan2(2*(w*x + y*z), 1 - 2*(x**2 + y**2))
-            # pitch = np.arcsin(2*(w*y - z*x))
-            yaw = np.arctan2(2 * (w * z + x * y), 1 - 2 * (y ** 2 + z ** 2))
-            return yaw
-        except:
+            #roll = np.arctan2(2*(w*x + y*z), 1 - 2*(x**2 + y**2))
+            #pitch = np.arcsin(2*(w*y - z*x))
+            yaw = np.arctan2(2*(w*z + x*y), 1 - 2*(y**2 + z**2))  
+            return  yaw
+        except :
             return 0
 
-    dic_object_detection = ['Time', 'FrameID', 'HeadingAngle', 'X', 'Y', 'Z']
-    object_detection_file = open(output_dir + "/" + "pos_pji.csv", 'w')
+    json_path=os.path.join(output_dir,'output.json')
+    dic_object_detection = ['Time','FrameID','HeadingAngle','X', 'Y' ,'Z']
+    object_detection_file = open(output_dir + "/"+"pos_pji.csv", 'w')
     writer_object_detection = csv.writer(object_detection_file)
     writer_object_detection.writerow(dic_object_detection)
 
-    frame_max = sys.maxsize
-    with rosbag.Bag(input_dir, 'r') as bag:
-        # flag=False
-        framenum = 1
-        # hasLoc = False
-        for topic, msg, t in bag.read_messages():  # t代表时间
-
-            if topic == "/amcl_pose":  # 100hz  /odom
-                poseX = msg.pose.pose.position.x
-                poseY = msg.pose.pose.position.y
-                poseZ = msg.pose.pose.position.z
-                orientationX = msg.pose.pose.orientation.x
-                orientationY = msg.pose.pose.orientation.y
-                orientationZ = msg.pose.pose.orientation.z
-                orientationW = msg.pose.pose.orientation.w
-                egoyaw = quaternion_to_euler(orientationX, orientationY, orientationZ, orientationW)
-                message_location = [str(t)[:-6], framenum, egoyaw, poseX, poseY, poseZ]
 
+    frame_max=sys.maxsize
+    with rosbag.Bag(input_dir ,'r') as bag:
+        poseX=poseY=0
+        #flag=False
+        framenum = 1
+        #hasLoc = False
+        #用来判断机器人点云/图像/规划/定位是否丢帧↓↓↓
+        num_cam1=0
+        rate_cam1=10
+        
+        num_cam2=0
+        rate_cam2=10
+        
+        cam1_exist_flag=False
+        cam2_exist_flag=False
+        
+        #num_pcd=0
+        #rate_pcd=10
+        
+        bag_start_time = bag.get_start_time()
+        bag_end_time = bag.get_end_time()
+        duration=bag_end_time-bag_start_time
+        
+        #Theoretical_pcd_num=int(duration*rate_pcd)
+        Theoretical_cam1_num=int(duration*rate_cam1)
+        Theoretical_cam2_num=int(duration*rate_cam2)
+
+        amcl_pose_lost_flag=True
+        
+        
+        #用来判断机器人点云/图像/规划/定位是否丢帧↑↑↑
+        for topic,msg,t in bag.read_messages(topics=['/amcl_pose','/scan_map_icp_amcl_node/scan_point_transformed','/ob_camera_01/color/image_raw','/ob_camera_02/color/image_raw']):    #t代表时间
+            '''
+            if topic == "/scan_map_icp_amcl_node/scan_point_transformed":
+                num_pcd+=1
+            '''    
+            if topic == "/ob_camera_01/color/image_raw":
+                cam1_exist_flag=True
+                num_cam1+=1
+                
+            if topic == "/ob_camera_02/color/image_raw":
+                cam2_exist_flag=True
+                num_cam2+=1
+        
+            if topic == "/amcl_pose":#100hz  /odom
+                
+                poseX=msg.pose.pose.position.x
+                poseY=msg.pose.pose.position.y
+                poseZ=msg.pose.pose.position.z
+                if poseX!=0 and poseY!=0:
+                    amcl_pose_lost_flag=False
+                orientationX=msg.pose.pose.orientation.x
+                orientationY=msg.pose.pose.orientation.y
+                orientationZ=msg.pose.pose.orientation.z
+                orientationW=msg.pose.pose.orientation.w
+                egoyaw=quaternion_to_euler(orientationX,orientationY,orientationZ,orientationW)
+                message_location =[str(t)[:-6],framenum,egoyaw,poseX,poseY,poseZ]
+                  
                 writer_object_detection.writerow(message_location)
-                framenum += 1
-                # hasLoc = False
-                # data_read_flag=False
-
-        # driving_status_file.close()
+                framenum+=1
         object_detection_file.close()
-
-
-if __name__ == "__main__":
-
-    global_bagfile = sys.argv[1]  # 全局地图的rosbag包
-    # global_bagfile = '/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428/costmap.bag'
-    input_dir = sys.argv[2]  # 触发采集的rosbag包
-    # input_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428/2024-04-25-15-58-27_obstacledetection_30.bag'
-    bagname = input_dir.split('/')[-1].split('.')[0]
-    output_dir = sys.argv[3]  # 输出文件路径
-    # output_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428'
-    global_topic = '/move_base/global_costmap/costmap'
-    local_topic = '/move_base/local_costmap/costmap'
-
-    output_dir = os.path.join(output_dir, bagname)
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir)
-    '''
-    df, global_height = process_rosbag(input_dir, global_topic, local_topic)
-    result_df = merge_obstacles(df, global_height)
-    result_df.to_csv(os.path.join(output_dir,'merged_obstacles.csv'), index=False)
-    parsehancheng(input_dir, output_dir)
-    parsezihao(input_dir, output_dir)
-    print('successfully analysis '+input_dir)
-    '''
-
-    try:
-        process_global_rosbag(global_bagfile, global_topic)
-        df = process_local_rosbag(input_dir, local_topic)
-        result_df = merge_obstacles(df, global_height)
-        result_df.to_csv(os.path.join(output_dir, 'merged_obstacles.csv'), index=False)
-        parsehancheng(input_dir, output_dir)
-        parsezihao(input_dir, output_dir)
-        print('successfully analysis ' + input_dir)
-    except Exception as e:
-        print(e)
-
+        
+        with open(json_path, "w") as file:
+            data = []
+            '''
+            if (Theoretical_pcd_num - num_pcd) / Theoretical_pcd_num > 0.5:
+                data.append('点云丢帧')
+            '''
+            if cam1_exist_flag==False and cam2_exist_flag==False:
+                data.append('图像缺失')
+            if cam1_exist_flag==True and cam2_exist_flag==True:
+                if (Theoretical_cam1_num > 0 and float(Theoretical_cam1_num - num_cam1) / Theoretical_cam1_num > 0.5) or (Theoretical_cam2_num > 0 and float(Theoretical_cam2_num - num_cam2) / Theoretical_cam2_num > 0.5):
+                    data.append('图像丢帧')
+            if amcl_pose_lost_flag :
+                data.append('自车数据缺失')
+        
+            if data == []:
+                data = ['正常']
+        
+            # 将数据转换为 JSON 格式并写入文件
+            json.dump(data, file, ensure_ascii=False)
 
 def parse(costmap_bag, data_bag, output_dir):
     global_bagfile = costmap_bag  # 全局地图的rosbag包
     # global_bagfile = '/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428/costmap.bag'
     input_dir = data_bag  # 触发采集的rosbag包
-    # input_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428/2024-04-25-15-58-27_obstacledetection_30.bag'
-    bagname = input_dir.split('/')[-1].split('.bag')[0]
-    # output_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428'
+    #input_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428/2024-04-25-15-58-27_obstacledetection_30.bag'
+    bagname=input_dir.split('/')[-1].split('.')[0]
+    #output_dir='/media/dell/56FAD34BFAD32651/data/pujin_datareturn/20240428'
     global_topic = '/move_base/global_costmap/costmap'
     local_topic = '/move_base/local_costmap/costmap'
-
-    output_dir = os.path.join(output_dir, bagname)
+    
+    output_dir=os.path.join(output_dir, bagname)
     if not os.path.exists(output_dir):
         os.makedirs(output_dir)
     '''
@@ -328,9 +351,14 @@ def parse(costmap_bag, data_bag, output_dir):
         process_global_rosbag(global_bagfile, global_topic)
         df = process_local_rosbag(input_dir, local_topic)
         result_df = merge_obstacles(df, global_height)
-        result_df.to_csv(os.path.join(output_dir, 'merged_obstacles.csv'), index=False)
+        result_df.to_csv(os.path.join(output_dir,'merged_obstacles.csv'), index=False)
         parsehancheng(input_dir, output_dir)
         parsezihao(input_dir, output_dir)
-        print('successfully analysis ' + input_dir)
+        print('successfully analysis '+input_dir)
     except Exception as e:
         print(e)
+        json_path=os.path.join(output_dir,'output.json')
+        with open(json_path, "w") as file:
+                data = ['解析程序错误']
+                # 将数据转换为 JSON 格式并写入文件
+                json.dump(data, file, ensure_ascii=False)

+ 0 - 85
src/python2/pjibot/resource/create_video_from_pcd_bak.py

@@ -1,85 +0,0 @@
-import sys
-import matplotlib.pyplot as plt
-import cv2
-import os
-import numpy as np
-from pyntcloud import PyntCloud
-
-
-def find_global_min_max(pcd_folder_path):
-    min_x, max_x = np.inf, -np.inf
-    min_y, max_y = np.inf, -np.inf
-    min_z, max_z = np.inf, -np.inf
-    pcd_files = [os.path.join(pcd_folder_path, f) for f in os.listdir(pcd_folder_path) if f.endswith('.pcd')]
-    for pcd_path in pcd_files:
-        cloud = PyntCloud.from_file(pcd_path)
-        points = cloud.points
-        min_x, max_x = min(min_x, points['x'].min()), max(max_x, points['x'].max())
-        min_y, max_y = min(min_y, points['y'].min()), max(max_y, points['y'].max())
-        min_z = 0
-        max_z = 0.1
-        # min_z, max_z = min(min_z, points['z'].min()), max(max_z, points['z'].max())
-    return (min_x, max_x), (min_y, max_y), (min_z, max_z)
-
-
-def process_pcd_to_top_view_image(pcd_path, output_path, axis_limits):
-    cloud = PyntCloud.from_file(pcd_path)
-    points = cloud.points
-    plt.figure(figsize=(10, 10))
-    ax = plt.axes(projection='3d')
-    ax.scatter(points['x'], points['y'], points['z'], s=1, color='blue')
-    ax.view_init(elev=90, azim=90)
-
-    # Set the axis limits
-    ax.set_xlim(axis_limits[0])
-    ax.set_ylim(axis_limits[1])
-    ax.set_zlim(axis_limits[2])
-
-    plt.axis('off')
-    plt.savefig(output_path, bbox_inches='tight', pad_inches=0)
-    plt.close()
-
-
-def create_video_from_images(image_folder, output_video_path, frame_rate):
-    images = [img for img in sorted(os.listdir(image_folder)) if img.endswith(".jpg")]
-    frame = cv2.imread(os.path.join(image_folder, images[0]))
-    height, width, layers = frame.shape
-    video = cv2.VideoWriter(output_video_path, cv2.VideoWriter_fourcc(*'mp4'), float(frame_rate), (width, height))
-    for image in images:
-        frame = cv2.imread(os.path.join(image_folder, image))
-        video.write(frame)
-    video.release()
-    print(f"视频已成功保存在:{output_video_path}")
-
-
-if __name__ == '__main__':
-    bag_base_name = sys.argv[1]
-    output_base_path = sys.argv[2]
-
-    print(f"base路径为:{bag_base_name}")
-    lidar_output_base = os.path.join(output_base_path, bag_base_name + '_pcd_lidar')
-    pcd_folder_path = os.path.join(lidar_output_base, 'pcd_ascii')
-    images_folder_path = os.path.join(lidar_output_base, 'images')
-    video_path = os.path.join(lidar_output_base, 'output_video.mp4')
-    frame_rate_file = os.path.join(output_base_path, bag_base_name + '_pcd_lidar', 'frame_rate.txt')
-
-    # 确保图片目录存在
-    if not os.path.exists(images_folder_path):
-        os.makedirs(images_folder_path)
-
-    # 读取帧率
-    with open(frame_rate_file, 'r') as file:
-        frame_rate = file.read().strip()
-
-    # Determine the axis limits based on all PCD files
-    axis_limits = find_global_min_max(pcd_folder_path)
-
-    # 处理每个PCD文件生成图像
-    pcd_files = [f for f in os.listdir(pcd_folder_path) if f.endswith('.pcd')]
-    for file_name in pcd_files:
-        pcd_file_path = os.path.join(pcd_folder_path, file_name)
-        image_file_path = os.path.join(images_folder_path, file_name.replace('.pcd', '.jpg'))
-        process_pcd_to_top_view_image(pcd_file_path, image_file_path, axis_limits)
-
-    # 从图像生成视频
-    create_video_from_images(images_folder_path, video_path, frame_rate)

+ 0 - 0
src/python2/pjibot/utils/__init__.py


+ 45 - 0
src/python2/pjibot/utils/json_utils.py

@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
+import json
+
+def parse_json_to_string_array(file_path):
+    try:
+        with open(file_path, 'r') as file:
+            file_content = file.read()
+            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
+
+        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
+        if isinstance(data, list):
+            for item in data:
+                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
+                    raise ValueError("JSON数组中的元素不是字符串")
+            return data
+        else:
+            return []
+    except Exception as e:
+        return []
+def list_to_json_file(data, file_path):
+    """
+    将列表转换为JSON格式并写入指定的文件路径。
+    如果文件已存在,则覆盖它。
+
+    参数:
+    data (list): 要转换为JSON的列表。
+    file_path (str): 要写入JSON数据的文件路径。
+    """
+    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
+    json_data = json.dumps(data, ensure_ascii=False, indent=4)
+    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
+
+    # 以写入模式打开文件,如果文件已存在则覆盖
+    with open(file_path, 'w') as file:
+        # 将UTF-8编码的JSON字符串写入文件
+        file.write(json_data_utf8)
+
+def add_error(parse_prefix,error_bag_json):
+    error_bag_list = parse_json_to_string_array(error_bag_json)
+    error_bag_list.append(parse_prefix)
+    error_bag_list = list(set(error_bag_list))
+    list_to_json_file(error_bag_list, error_bag_json)

+ 1 - 0
src/python2/pjibot_delivery/2callback-nohup.sh

@@ -5,4 +5,5 @@ if [ ! -d "./log" ]; then
 else
     echo "Directory './log' already exists."
 fi
+rm -rf log/2callback*
 nohup python2 2callback-pjibot_delivery.py > log/2callback-pjibot_delivery.out 2>&1 &

+ 23 - 21
src/python2/pjibot_delivery/2callback-pjibot_delivery.py

@@ -1,17 +1,15 @@
 # -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import json
 import time
 import urllib2
 import oss2
 from datetime import datetime, timedelta
-
 import logging
-
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
-
-logging.basicConfig(filename=path1 + 'log/2callback.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/2callback-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 key2 = 'data/'
 key3 = 'data_merge/'
@@ -55,7 +53,7 @@ if __name__ == '__main__':
                         file8 = False
                         file9 = False
                         for obj2 in oss2.ObjectIterator(bucket, prefix=prefix + '/'):
-                            if '/callback.json' in str(obj2.key):
+                            if '/callback_done.json' in str(obj2.key):
                                 file1 = True
                             if '/ego_pji.csv' in str(obj2.key):
                                 file2 = True
@@ -63,15 +61,15 @@ if __name__ == '__main__':
                                 file3 = True
                             if '/pcd_overlook.mp4' in str(obj2.key):
                                 file4 = True
-                            if '/pos_pji.csv' in str(obj2.key):
+                            if '/drive.csv' in str(obj2.key):
                                 file5 = True
                             if '/scenario_orig.mp4' in str(obj2.key):
                                 file7 = True
-                            if '/simulation.xosc' in str(obj2.key) or '/scenario.xosc' in str(obj2.key):
+                            if '/scenario_hmi.xosc' in str(obj2.key):
                                 file8 = True
                             if '/trajectory_pji.csv' in str(obj2.key):
                                 file9 = True
-                        if not file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
+                        if file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
                             continue
                         time.sleep(1)
                         logging.info("发送:%s", prefix)
@@ -102,6 +100,11 @@ if __name__ == '__main__':
                             old_ros_bag_path = json_object['rosBagPath']
                             task_id = json_object['taskId']
                             trigger_id = json_object['triggerId']
+                            check = json_object['check']
+                            check_order = ['自车数据缺失', '不在道路范围', '无规划路径', '目标点缺失','点云缺失', '点云丢帧', '解析程序错误', '还原程序错误', '评价程序错误']
+                            check_order_dict = dict((item, idx) for idx, item in enumerate(check_order))
+                            check = sorted(check, key=lambda x: check_order_dict.get(x, float('inf')))
+                            check = ','.join(check) # 数组元素拼接成字符串序列
                         except Exception as e:
                             logging.exception("callback报错:%s", str(e))
                             continue
@@ -111,14 +114,11 @@ if __name__ == '__main__':
                         # new_date = add_hour(old_date, 8)
                         new_date = old_date
                         old_delete_list = []
+                        callback_done_oss_key = ''
                         for obj_old in oss2.ObjectIterator(bucket, prefix=old_file_path):
                             old_delete_list.append(str(obj_old.key))
                             if 'callback.json' in str(obj_old.key):
-                                bucket.copy_object(bucket_name, str(obj_old.key),
-                                                   str(obj_old.key).replace(old_date, new_date).replace(
-                                                       'callback.json',
-                                                       'callback_done.json'))
-                                bucket.delete_object(str(obj_old.key))  # 删除 callback.json
+                                callback_done_oss_key = str(obj_old.key).replace(old_date, new_date).replace('callback.json','callback_done.json')
                         # todo 时区不变也就不需要移动文件了
                         #     else:
                         #         bucket.copy_object(bucket_name, str(obj_old.key),
@@ -137,7 +137,8 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
                         else:
                             logging.info("json_object 不包含 'userId' 字段")
@@ -148,20 +149,21 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
 
                         json_data2 = json.dumps(data2)
+                        bucket.put_object(callback_done_oss_key, unicode(json_data2))
                         logging.info("回调接口请求中:%s" % url2_private)
-                        request2 = urllib2.Request(url2_private, json_data2,
-                                                   headers={'Content-Type': 'application/json',
-                                                            'authorization': access_token})
+                        logging.info("回调接口发送参数为: %s" % str(data2))
+                        request2 = urllib2.Request(url2_private, json_data2,headers={'Content-Type': 'application/json','authorization': access_token})
                         response2 = urllib2.urlopen(request2)
                         result_json2 = response2.read()
                         result_object2 = json.loads(result_json2)
                         logging.info("回调接口请求结果为: %s", result_object2)
                 except Exception as e:
                     logging.exception("局部异常处理: %s" % str(e))
-            time.sleep(30)
+            time.sleep(10)
         except Exception as e:
             logging.exception("全局错误处理: %s" % str(e))

+ 11 - 0
src/python2/pjibot_delivery/2csv-errorBag.json

@@ -0,0 +1,11 @@
+[
+    "pjibot_delivery/ps001/data_parse/2024-10-23-14-10-24_obstacledetection_30/", 
+    "pjibot_delivery/ps001/data_parse/2024-10-23-14-10-25_obstacledetection_30/", 
+    "pjibot_delivery/ps001/data_parse/2024-10-23-14-10-26_obstacledetection_30/", 
+    "pjibot_delivery/ps001/data_parse/2024-10-23-14-10-27_obstacledetection_30/", 
+    "pjibot_delivery/ps001/data_parse/2024-10-24-15-48-07/", 
+    "pjibot_delivery/ps001/data_parse/VD100M6-BJ-Perception2024-10-24-15-49-34/", 
+    "pjibot_delivery/ps001/data_parse/VP100M23-BJ-movebase-2024-11-15-14-19-41/", 
+    "pjibot_delivery/ps001/data_parse/test_1126_01/", 
+    "pjibot_delivery/ps001/data_parse/mlx11281/"
+]

+ 1 - 0
src/python2/pjibot_delivery/2csv-nohup.sh

@@ -5,4 +5,5 @@ if [ ! -d "./log" ]; then
 else
     echo "Directory './log' already exists."
 fi
+rm -rf log/2csv*
 nohup python2 2csv-pjibot_delivery.py > log/2csv-pjibot_delivery.out 2>&1 &

+ 100 - 66
src/python2/pjibot_delivery/2csv-pjibot_delivery.py

@@ -1,107 +1,146 @@
 # -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
 import os
-import shutil
 import subprocess
 import time
 import oss2
-
-from resource import bagtocsv_robot
-
+import json
+import io
 import logging
-
+from resource import bagtocsv_robot
+from utils import json_utils
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 path2 = '/mnt/disk001/pdf_outdoor/run/'
-path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv/'
-logging.basicConfig(filename=path1 + 'log/2csv.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/2csv/'
+logging.basicConfig(filename=path1 + 'log/2csv-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
-sleep_time = 30  # 每多少秒扫描一次
+sleep_time = 10  # 每多少秒扫描一次
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/2csv-errorBag.json"
 
 
+    
 def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
     try:
         bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
         bagname = data_bag.split('/')[-1].split('.')[0]
-        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件的目录
+        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件和output.json的目录
 
+        # ------- 处理 output.json - 开始 -------
+        try:
+            output_json_path = str(local_csv_dir)+'/output.json'
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                callback_json_oss_key = parse_prefix+'callback.json'
+                callback_json_local = local_csv_dir+'/callback.json'
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                data['check'] = list(set(data['check'])) # 去重
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            pass
+        # ------- 处理 output.json - 结束 -------
         csv_file_name1 = 'trajectory_pji'
         local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
         oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        if os.path.exists(local_csv_file_path1):
+            bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        else:
+            logging.error("没有 trajectory_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name2 = 'ego_pji'
         local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
         oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        if os.path.exists(local_csv_file_path2):
+            bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        else:
+            logging.error("没有 ego_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
-        csv_file_name3 = 'pos_pji'
+        csv_file_name3 = 'targetposition'
         local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
         oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        if os.path.exists(local_csv_file_path3):
+            bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        else:
+            logging.error("没有 targetposition.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name4 = 'objects_pji'
         local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
         oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
-
-        # 生成pdf
-        try:
+        if os.path.exists(local_csv_file_path4):
+            bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
+        else:
+            logging.error("没有 objects_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        
+        csv_file_name5 = 'drive'
+        local_csv_file_path5 = str(local_csv_dir) + '/' + str(csv_file_name5) + '.csv'
+        oss_csv_object_key5 = parse_prefix + csv_file_name5 + '.csv'
+        if os.path.exists(local_csv_file_path5):
+            bucket.put_object_from_file(oss_csv_object_key5, local_csv_file_path5)
+        else:
+            logging.error("没有 drive.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+
+        # ------- 生成pdf - 开始 -------
+        pdf_local_path = str(local_csv_dir) + '/report.pdf'
+        can_pdf = True
+        for output in outputs:
+            if str(output) in ['自车数据缺失','无规划路径']:
+                logging.error("【自车数据缺失、无规划路径】导致无法生成评价报告PDF")
+                can_pdf = False
+        if can_pdf:
             os.chdir(path2)
-            # 构造命令
             command1 = ['./pji_outdoor_real',
                         os.path.join(local_csv_dir, ''),  # 注意:这里可能不需要末尾的 '/',取决于程序要求
                         os.path.join(local_csv_dir, ''),  # 同上
                         os.path.join(local_csv_dir, 'trajectory.png'),
                         bagname]
-
-            # 记录调用命令的信息
             logging.info("调用生成pdf 报告命令: %s" % ' '.join(command1))
-
-            # 使用 subprocess.Popen 执行命令
             process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             stdout, stderr = process.communicate()  # 等待命令执行完成
-
-            # 检查是否有输出或错误
             if stdout:
                 logging.info("命令的标准输出:")
                 logging.info(stdout.decode('utf-8'))  # Python 2 中需要解码
             if stderr:
                 logging.error("命令的错误输出:")
                 logging.error(stderr.decode('utf-8'))  # Python 2 中需要解码
-
-            # 检查命令是否成功执行
-            if process.returncode == 0:
-                logging.info("命令执行成功")
-            else:
+            if process.returncode != 0:
                 logging.error("命令执行失败,退出码: %s" % process.returncode)
-
-        except OSError as e:
-            # 如果更改目录失败或命令不存在等
-            logging.error("在执行过程中发生错误: %s" % e)
-
-        oss_csv_object_key5 = parse_prefix + 'report.pdf'
-        bucket.put_object_from_file(oss_csv_object_key5, str(local_csv_dir) + '/report.pdf')
-        logging.info("pdf 报告生成并上传完成。")
-
+            oss_csv_object_key5 = parse_prefix + 'report.pdf'
+            bucket.put_object_from_file(oss_csv_object_key5, pdf_local_path)
+            logging.info("pdf 报告生成并上传完成。")
+        # ------- 生成pdf - 结束 -------
+        
         # 记得删除
         local_delete_list.append(local_csv_file_path1)
         local_delete_list.append(local_csv_file_path2)
-        local_delete_list.append(local_csv_file_path3)
         local_delete_list.append(local_csv_file_path4)
-        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
+        local_delete_list.append(output_json_path)
+        local_delete_list.append(pdf_local_path)
+        local_delete_list.append(str(local_csv_dir) + '/trajectory.png')
 
     except Exception as e2:
-        # 当出现异常时执行的代码
         logging.exception("生成csv报错: %s", e2)
+        json_utils.add_error(parse_prefix,error_bag_json)
 
-
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
     while True:
         logging.info("开始新一轮扫描:%s " % key1)
         try:
@@ -120,24 +159,23 @@ if __name__ == '__main__':
                                                                                                           '/')  # data_parse 目录
                     csv1_done = False
                     csv2_done = False
-                    csv3_done = False
                     csv4_done = False
-                    pdf_done = False
-                    for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):  # 判断 data_parse 目录下是否有解析后的文件
+                    csv5_done = False
+                    for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full): # 判断 data_parse 目录下是否有解析后的文件
                         if '/trajectory_pji.csv' in str(obj2.key):
                             csv1_done = True
                         if '/ego_pji.csv' in str(obj2.key):
                             csv2_done = True
-                        if '/pos_pji.csv' in str(obj2.key):
-                            csv3_done = True
                         if '/objects_pji.csv' in str(obj2.key):
                             csv4_done = True
-                        if '/report.pdf' in str(obj2.key):
-                            pdf_done = True
-                    if csv1_done and csv2_done and csv3_done and csv4_done and pdf_done:
+                        if '/drive.csv' in str(obj2.key):
+                            csv5_done = True
+                    if csv1_done and csv2_done and csv4_done and csv5_done:
                         continue
-
-                    logging.info("开始生成场景还原csv: ", str(obj1.key))
+                    error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                    if parse_prefix_full in error_bag_list:
+                        continue
+                    logging.info("------- 生成场景还原csv - 开始: %s -------" % str(obj1.key))
                     local_merged_bag_path = path3 + merged_bag_object_key
                     local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
                     local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
@@ -147,22 +185,18 @@ if __name__ == '__main__':
                         os.makedirs(local_parse_dir)
                     merged_bag_full_name = merged_bag_object_key_split[-1]
                     merged_bag_name = merged_bag_full_name.split('.')[0]
-                    try:
-                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
-                    except Exception as e:
-                        logging.exception("下载合并后的bag包失败: %s" % str(e))
-
+                    bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
                     local_delete_list.append(local_merged_bag_path)
                     # 2 生成 pos_orig.csv 和 pos_hmi.csv
                     parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
-
+                    logging.info("------- 生成场景还原csv - 结束: %s -------" % str(obj1.key))
             # 删除本地临时文件
             if len(local_delete_list) > 0:
                 for local_delete in local_delete_list:
                     try:
                         os.remove(local_delete)
                     except Exception as e:
-                        logging.exception("捕获到一个异常: %s" % str(e))
+                        pass
         except Exception as e:
             logging.exception("全局错误处理: %s" % str(e))
         time.sleep(sleep_time)

+ 1 - 0
src/python2/pjibot_delivery/2pcd-nohup.sh

@@ -5,4 +5,5 @@ if [ ! -d "./log" ]; then
 else
     echo "Directory './log' already exists."
 fi
+rm -rf log/2pcd*
 nohup python2 2pcd-pjibot_delivery.py > log/2pcd-pjibot_delivery.out 2>&1 &

+ 21 - 18
src/python2/pjibot_delivery/2pcd-pjibot_delivery.py

@@ -1,19 +1,24 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import os
 import time
 import oss2
-
-from resource import pcdtovideo_monitor_overlook
-
 import logging
-
+from resource import pcdtovideo_monitor_overlook
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
-logging.basicConfig(filename=path1 + 'log/2pcd.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/2pcd-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 sleep_time = 30  # 每多少秒扫描一次
-
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/2pcd-errorBag.json"
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
+# ------- 全局配置 -------
 
 def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local_delete_list):
     global bucket
@@ -26,15 +31,14 @@ def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local
         logging.info("上传点云视频到: %s", oss_csv_object_key3)
         local_delete_list.append(local_mp4_path)
     except Exception as e:
+        local_delete_list.remove(merged_bag_file_path)
+        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+        error_bag_list.append(parse_prefix2)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成点云视频报错: %s" % str(e))
 
 
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
     while True:
         logging.info("开始新一轮扫描")
         try:
@@ -52,16 +56,14 @@ if __name__ == '__main__':
                         parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
                         parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'
                         pcd_done = False
-                        callback_done = False
                         for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
-                            if '/callback.json' in str(obj2.key):
-                                callback_done = True
                             if '/pcd_overlook.mp4' in str(obj2.key):
                                 pcd_done = True
-                        if not callback_done:
-                            continue
                         if pcd_done:
                             continue
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                        if str(parse_prefix_full) in error_bag_list:
+                            continue
                         logging.info("%s 需要生成点云视频 pcd_overlook.mp4" % str(parse_prefix_full))
                         local_merged_bag_path = path1 + 'pcd/' + merged_bag_object_key
                         local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
@@ -73,6 +75,7 @@ if __name__ == '__main__':
                         merged_bag_full_name = merged_bag_object_key_split[-1]
                         merged_bag_name = merged_bag_full_name.split('.')[0]
                         bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
+                        logging.info("本地 bag 包路径为【%s】" % str(local_merged_bag_path))
                         local_delete_list.append(local_merged_bag_path)
                         # 2 生成 pos_orig.csv 和 pos_hmi.csv
                         parse_to_pcd_mp4(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)

+ 0 - 0
src/python2/pjibot_delivery/2xosc-errorBag.json


+ 2 - 1
src/python2/pjibot_delivery/2xosc-nohup.sh

@@ -5,4 +5,5 @@ if [ ! -d "./log" ]; then
 else
     echo "Directory './log' already exists."
 fi
-nohup python2 2xosc-pjibot_delivery.py > log/2xosc.out 2>&1 &
+rm -rf log/2xosc*
+nohup python2 2xosc-pjibot_delivery.py > log/2xosc-pjibot_delivery.out 2>&1 &

+ 61 - 21
src/python2/pjibot_delivery/2xosc-pjibot_delivery.py

@@ -1,40 +1,80 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import os
 import time
 import oss2
 import logging
-
+import json
+import subprocess
+import io
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 path2 = '/mnt/disk001/dcl_data_process/src/python3/pjibot_outdoor/'
-
-logging.basicConfig(filename=path1 + 'log/2xosc.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/2xosc-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 sleep_time = 30  # 每多少秒扫描一次
-
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/2xosc-errorBag.json"
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
 
 def generate_xosc(parse_prefix, local_parse_dir, local_delete_list):
     try:
         os.chdir(path2)
-        command2 = 'python3 jiqiren_outdoor.py ' + local_parse_dir[:-1] + ' 0'  # 配送机器人0 巡检机器人1
-        logging.info("进入目录 %s 调用命令2: %s", path2, str(command2))
-        os.system(command2)
+        command2 = 'python3 jiqiren_outdoor.py {} 0'.format(local_parse_dir[:-1])  # 配送机器人0 巡检机器人1
+        logging.info("进入目录 %s 调用命令2: %s", path2, command2)
+        process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()  # 等待进程执行完成并获取输出
+        if process.returncode == 0:
+            logging.info("命令输出: %s", stdout.decode("utf-8"))
+        else:
+            logging.error("命令执行失败,错误码: %d", process.returncode)
+            logging.error("命令错误输出: %s", stderr.decode("utf-8"))
         local_xosc_path2 = local_parse_dir + 'simulation/xosc/openx_outdoor0.xosc'
         bucket.put_object_from_file(parse_prefix + 'scenario.xosc', local_xosc_path2)
         bucket.put_object_from_file(parse_prefix + 'scenario_hmi.xosc', local_xosc_path2)
         logging.info("上传 scenario_hmi.xosc 成功: %s" % str(parse_prefix + 'scenario.xosc'))
+        
+        # ------- 处理 output.json - 开始 -------
+        output_json_path = local_parse_dir + 'simulation/xosc/output.json'
+        callback_json_oss_key = parse_prefix+'callback.json'
+        callback_json_local = local_parse_dir+'/callback.json'
+        try:
+            # 1 解析 output.json
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            # todo 可能没有callback.json,已经处理成 callback_done.json了,暂时不管
+            logging.exception("处理 output.json报错: %s" % str(e3))
+            pass
+        # ------- 处理 output.json - 结束 -------
+        
+        
+        # 处理删除
         local_delete_list.append(local_xosc_path2)
+        local_delete_list.append(callback_json_local)
     except Exception as e:
+        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+        error_bag_list.append(parse_prefix)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成xosc报错: %s" % str(e))
 
-
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'open-bucket')
     while True:
         try:
             logging.info("开始新一轮扫描")
@@ -59,23 +99,23 @@ if __name__ == '__main__':
                                 xosc_done = True
                             if '/objects_pji.csv' in str(obj3.key):
                                 csv1_done = True
-                            if '/pos_pji.csv' in str(obj3.key):
+                            if '/ego_pji.csv' in str(obj3.key):
                                 csv2_done = True
                         if xosc_done:
-                            # logging.info("存在 simulation.xosc(scenario.xosc): %s" % str(parse_prefix_full))
                             continue
                         if not csv1_done:
-                            # logging.info("不存在 /objects_pji.csv: %s" % str(parse_prefix_full))
                             continue
                         if not csv2_done:
-                            # logging.info("不存在 /pos_pji.csv: %s" % str(parse_prefix_full))
+                            continue
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                        if str(parse_prefix_full) in error_bag_list:
                             continue
                         logging.info("需要生成 scenario_hmi.xosc: %s" % str(parse_prefix_full))
                         local_dir_full = path1 + parse_prefix_full
                         if not os.path.exists(local_dir_full):
                             os.makedirs(local_dir_full)
-                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv', local_dir_full + 'objects_pji.csv')
-                        bucket.get_object_to_file(parse_prefix_full+'pos_pji.csv', local_dir_full+'pos_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv',local_dir_full + 'objects_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'ego_pji.csv', local_dir_full + 'ego_pji.csv')
                         generate_xosc(parse_prefix_full, local_dir_full, local_delete_list)
                 except Exception as e:
                     logging.exception("局部异常处理: %s", str(e))

+ 21 - 14
src/python2/pjibot_delivery/callback-pjibot_delivery.py

@@ -1,4 +1,7 @@
 # -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import json
 import time
 import urllib2
@@ -55,7 +58,7 @@ if __name__ == '__main__':
                         file8 = False
                         file9 = False
                         for obj2 in oss2.ObjectIterator(bucket, prefix=prefix+ '/'):
-                            if '/callback.json' in str(obj2.key):
+                            if '/callback_done.json' in str(obj2.key):
                                 file1 = True
                             if '/ego_pji.csv' in str(obj2.key):
                                 file2 = True
@@ -63,15 +66,15 @@ if __name__ == '__main__':
                                 file3 = True
                             if '/pcd_overlook.mp4' in str(obj2.key):
                                 file4 = True
-                            if '/pos_pji.csv' in str(obj2.key):
+                            if '/drive.csv' in str(obj2.key):
                                 file5 = True
                             if '/scenario_orig.mp4' in str(obj2.key):
                                 file7 = True
-                            if '/simulation.xosc' in str(obj2.key) or '/scenario.xosc' in str(obj2.key):
+                            if '/scenario_hmi.xosc' in str(obj2.key):
                                 file8 = True
                             if '/trajectory_pji.csv' in str(obj2.key):
                                 file9 = True
-                        if not file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
+                        if file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
                             continue
                         time.sleep(1)
                         logging.info("发送:%s", prefix)
@@ -103,6 +106,11 @@ if __name__ == '__main__':
                             old_ros_bag_path = json_object['rosBagPath']
                             task_id = json_object['taskId']
                             trigger_id = json_object['triggerId']
+                            check = json_object['check']
+                            check_order = ['自车数据缺失', '不在道路范围', '无规划路径', '目标点缺失','点云缺失', '点云丢帧', '解析程序错误', '还原程序错误', '评价程序错误']
+                            check_order_dict = dict((item, idx) for idx, item in enumerate(check_order))
+                            check = sorted(check, key=lambda x: check_order_dict.get(x, float('inf')))
+                            check = ','.join(check) # 数组元素拼接成字符串序列
                         except Exception as e:
                             logging.exception("callback报错:%s", str(e))
                             continue
@@ -112,14 +120,11 @@ if __name__ == '__main__':
                         # new_date = add_hour(old_date, 8)
                         new_date = old_date
                         old_delete_list = []
+                        callback_done_oss_key = ''
                         for obj_old in oss2.ObjectIterator(bucket, prefix=old_file_path):
                             old_delete_list.append(str(obj_old.key))
                             if 'callback.json' in str(obj_old.key):
-                                bucket.copy_object(bucket_name, str(obj_old.key),
-                                                   str(obj_old.key).replace(old_date, new_date).replace(
-                                                       'callback.json',
-                                                       'callback_done.json'))
-                                bucket.delete_object(str(obj_old.key))  # 删除 callback.json
+                                callback_done_oss_key = str(obj_old.key).replace(old_date, new_date).replace('callback.json','callback_done.json')
                         # todo 时区不变也就不需要移动文件了
                         #     else:
                         #         bucket.copy_object(bucket_name, str(obj_old.key),
@@ -138,7 +143,8 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
                         else:
                             logging.info("json_object 不包含 'userId' 字段")
@@ -149,13 +155,14 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
                         json_data2 = json.dumps(data2)
+                        bucket.put_object(callback_done_oss_key, json_data2)
                         logging.info("回调接口请求中:%s" % url2_private)
-                        request2 = urllib2.Request(url2_private, json_data2,
-                                                   headers={'Content-Type': 'application/json',
-                                                            'authorization': access_token})
+                        logging.info("回调接口发送参数为: %s" % str(data2))
+                        request2 = urllib2.Request(url2_private, json_data2,headers={'Content-Type': 'application/json','authorization': access_token})
                         response2 = urllib2.urlopen(request2)
                         result_json2 = response2.read()
                         result_object2 = json.loads(result_json2)

+ 2 - 2
src/python2/pjibot_delivery/camera-pjibot_delivery.py

@@ -9,11 +9,11 @@ import logging
 
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 
-logging.basicConfig(filename=path1 + 'log/camera.log', level=logging.INFO,
+logging.basicConfig(filename=path1 + 'log/camera-pjibot_delivery.log', level=logging.INFO,
                     format='%(asctime)s - %(levelname)s - %(message)s')
 
 key1 = 'pjibot_delivery/'
-sleep_time = 30  # 每多少秒扫描一次
+sleep_time = 10  # 每多少秒扫描一次
 
 
 def parse_to_mp4(merged_bag_file_path, parse_prefix1, local_parse_dir2, local_delete_list3):

+ 139 - 1
src/python2/pjibot_delivery/csv-errorBag.json

@@ -164,5 +164,143 @@
     "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-10-24-15-48-07/", 
     "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/test_1128/", 
     "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/pji-2024-10-24-15-48-07/", 
-    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/36110-2024-10-24-15-49-34/"
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩前倾10°_2024-09-29-15-47-59/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩前方障碍物穿梭2024-09-29-15-32-26/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩前方障碍物遮挡2024-09-29-15-31-10/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩右偏斜30°_2024-09-29-15-43-02/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩右偏移15cm_2024-09-29-15-40-28/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩左偏移30°_2024-09-29-15-45-29/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩有电 轮廓遮挡2024-09-29-15-30-25/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩没电 轮廓遮挡2024-09-29-15-27-25/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/充电桩没电2024-09-29-11-39-55/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/障碍物跟随2024-09-29-15-34-00/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/36110-2024-10-24-15-49-34/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M20-BJ-dock-2024-11-14-18-31-39/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-12-11-29-38/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-12-16-46-00/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-09-38/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-13-28/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-17-58/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-19-08/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-21-51/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-11-22-34/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-14-41-18/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-14-42-27/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-14-43-31/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-15-00-23/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-15-01-53/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-15-21-24/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-15-23-22/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-14-15-27-01/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-09-55-03/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-09-56-08/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-03-19/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-16-24/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-17-10/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-17-51/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-21-10/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-09-20-10-21-43/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-09-38/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-11-59/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-13-08/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-14-16/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-15-16/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-15-56/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-16-33/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-17-11/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-17-39/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-18-15/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-19-14/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-21-09/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-22-41/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-24-08/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-25-26/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-27-42/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-28-56/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-22-17-29-48/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-14-37/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-15-36/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-16-14/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-16-46/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-17-43/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-18-26/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-19-37/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-20-30/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-22-34/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-32-20/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-34-43/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-44-45/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-45-23/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-46-03/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-46-40/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-47-16/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-48-25/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-49-08/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-50-00/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-51-26/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-52-46/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-14-55-20/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-00-26/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-01-26/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-02-28/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-03-20/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-04-06/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-04-43/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-10-23-15-05-51/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-45-20/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-47-50/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-50-25/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-51-19/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-52-58/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-17-53-56/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-15-05/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-15-55/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-17-20/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-20-23/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-22-37/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-23-27/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-24-41/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-14-18-25-42/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-15-14-49-45/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-15-14-50-40/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-15-14-51-37/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VD100M21-BJ-movebase-2024-11-15-14-52-29/", 
+    "pjibot_delivery/pjibot-P1YVPS1M22CM00020/data_parse/VP100M23-BJ-movebase-2024-11-15-14-19-41/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-09-12-14-24-23_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-09-13-09-23-24_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-10-22-15-27-44_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-10-23-09-31-34_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-10-25-09-58-21_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-04-15-23-40_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-04-15-38-12_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-04-16-05-07_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-04-16-12-48/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-04-16-12-48_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-05-16-05-51_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-05-18-08-47_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-12-17-25-04_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-12-17-29-31_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-12-17-30-49_obstacledetection_3/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/2024-11-12-17-38-35_obstacledetection_30/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-12-24/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-15-01/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-18-40/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-20-45/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-24-03/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-25-48/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M21-BJ-dock-2024-09-26-10-37-20/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-26-31/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-27-50/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-29-07/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-31-59/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-33-01/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-33-52/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-36-33/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-37-51/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-38-55/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-39-44/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-43-11/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-45-05/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-46-29/", 
+    "pjibot_delivery/pjibot-P1YYPS1M227M00107/data_parse/VD100M6-BJ-Perception2024-11-08-16-48-55/"
 ]

+ 1 - 1
src/python2/pjibot_delivery/csv-nohup.sh

@@ -5,5 +5,5 @@ if [ ! -d "./log" ]; then
 else
     echo "Directory './log' already exists."
 fi
-rm -rf log/csv-pjibot_delivery.out log/csv-pjibot_delivery.log
+rm -rf log/csv.out log/csv.log log/csv-pjibot_delivery.out log/csv-pjibot_delivery.log
 nohup python2 csv-pjibot_delivery.py > log/csv-pjibot_delivery.out 2>&1 &

+ 94 - 106
src/python2/pjibot_delivery/csv-pjibot_delivery.py

@@ -1,152 +1,147 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
 import os
-import shutil
 import subprocess
 import time
 import oss2
 import json
-
-from resource import bagtocsv_robot
-
+import io
 import logging
-
+from resource import bagtocsv_robot
+from utils import json_utils
+#  创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 path2 = '/mnt/disk001/pdf_outdoor/run/'
 path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv/'
-logging.basicConfig(filename=path1 + 'log/csv-pjibot_delivery.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/csv-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 sleep_time = 30  # 每多少秒扫描一次
 error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/csv-errorBag.json"
-
-
-def parse_json_to_string_array(file_path):
-    try:
-        # 打开并读取JSON文件(Python 2中不支持encoding参数,需要使用codecs模块或处理文件读取后的编码)
-        with open(file_path, 'r') as file:
-            # 读取文件内容
-            file_content = file.read()
-            # 解析JSON内容(Python 2中json.loads用于解析字符串)
-            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
-
-        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
-        if isinstance(data, list):
-            for item in data:
-                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
-                    raise ValueError("JSON数组中的元素不是字符串")
-            return data
-        else:
-            return []
-    except Exception as e:
-        return []
-
-
-def list_to_json_file(data, file_path):
-    """
-    将列表转换为JSON格式并写入指定的文件路径。
-    如果文件已存在,则覆盖它。
-
-    参数:
-    data (list): 要转换为JSON的列表。
-    file_path (str): 要写入JSON数据的文件路径。
-    """
-    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
-    json_data = json.dumps(data, ensure_ascii=False, indent=4)
-    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
-
-    # 以写入模式打开文件,如果文件已存在则覆盖
-    with open(file_path, 'w') as file:
-        # 将UTF-8编码的JSON字符串写入文件
-        file.write(json_data_utf8)
+# ------- 全局配置 -------
 
 
 def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
     try:
         bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
         bagname = data_bag.split('/')[-1].split('.')[0]
-        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件的目录
+        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件和output.json的目录
 
+        # ------- 处理 output.json - 开始 -------
+        try:
+            output_json_path = str(local_csv_dir)+'/output.json'
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                callback_json_oss_key = parse_prefix+'callback.json'
+                callback_json_local = local_csv_dir+'/callback.json'
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                data['check'] = list(set(data['check'])) # 去重
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            pass
+        # ------- 处理 output.json - 结束 -------
         csv_file_name1 = 'trajectory_pji'
         local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
         oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        if os.path.exists(local_csv_file_path1):
+            bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        else:
+            logging.error("没有 trajectory_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name2 = 'ego_pji'
         local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
         oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        if os.path.exists(local_csv_file_path2):
+            bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        else:
+            logging.error("没有 ego_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
-        csv_file_name3 = 'pos_pji'
+        csv_file_name3 = 'targetposition'
         local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
         oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        if os.path.exists(local_csv_file_path3):
+            bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        else:
+            logging.error("没有 targetposition.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name4 = 'objects_pji'
         local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
         oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
-
-        # 生成pdf
-        try:
-            # 设置调用目录
+        if os.path.exists(local_csv_file_path4):
+            bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
+        else:
+            logging.error("没有 objects_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        
+        csv_file_name5 = 'drive'
+        local_csv_file_path5 = str(local_csv_dir) + '/' + str(csv_file_name5) + '.csv'
+        oss_csv_object_key5 = parse_prefix + csv_file_name5 + '.csv'
+        if os.path.exists(local_csv_file_path5):
+            bucket.put_object_from_file(oss_csv_object_key5, local_csv_file_path5)
+        else:
+            logging.error("没有 drive.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+
+        # ------- 生成pdf - 开始 -------
+        pdf_local_path = str(local_csv_dir) + '/report.pdf'
+        can_pdf = True
+        for output in outputs:
+            if str(output) in ['自车数据缺失','无规划路径']:
+                logging.error("【自车数据缺失、无规划路径】导致无法生成评价报告PDF")
+                can_pdf = False
+        if can_pdf:
             os.chdir(path2)
-            # 构造命令
             command1 = ['./pji_outdoor_real',
                         os.path.join(local_csv_dir, ''),  # 注意:这里可能不需要末尾的 '/',取决于程序要求
                         os.path.join(local_csv_dir, ''),  # 同上
                         os.path.join(local_csv_dir, 'trajectory.png'),
                         bagname]
-
-            # 记录调用命令的信息
             logging.info("调用生成pdf 报告命令: %s" % ' '.join(command1))
-
-            # 使用 subprocess.Popen 执行命令
             process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             stdout, stderr = process.communicate()  # 等待命令执行完成
-
-            # 检查是否有输出或错误
             if stdout:
                 logging.info("命令的标准输出:")
                 logging.info(stdout.decode('utf-8'))  # Python 2 中需要解码
             if stderr:
                 logging.error("命令的错误输出:")
                 logging.error(stderr.decode('utf-8'))  # Python 2 中需要解码
-
-            # 检查命令是否成功执行
-            if process.returncode == 0:
-                logging.info("命令执行成功")
-            else:
+            if process.returncode != 0:
                 logging.error("命令执行失败,退出码: %s" % process.returncode)
-
-        except OSError as e:
-            # 如果更改目录失败或命令不存在等
-            logging.error("在执行过程中发生错误: %s" % e)
-
-        oss_csv_object_key5 = parse_prefix + 'report.pdf'
-        bucket.put_object_from_file(oss_csv_object_key5, str(local_csv_dir) + '/report.pdf')
-        logging.info("pdf 报告生成并上传完成。")
-
+            oss_csv_object_key5 = parse_prefix + 'report.pdf'
+            bucket.put_object_from_file(oss_csv_object_key5, pdf_local_path)
+            logging.info("pdf 报告生成并上传完成。")
+        # ------- 生成pdf - 结束 -------
+        
         # 记得删除
         local_delete_list.append(local_csv_file_path1)
         local_delete_list.append(local_csv_file_path2)
-        local_delete_list.append(local_csv_file_path3)
         local_delete_list.append(local_csv_file_path4)
-        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
+        local_delete_list.append(output_json_path)
+        local_delete_list.append(pdf_local_path)
+        local_delete_list.append(str(local_csv_dir) + '/trajectory.png')
 
     except Exception as e2:
-        error_bag_list = parse_json_to_string_array(error_bag_json)
-        error_bag_list.append(parse_prefix)
-        list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成csv报错: %s", e2)
+        json_utils.add_error(parse_prefix,error_bag_json)
 
-
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         logging.info("开始新一轮扫描:%s " % key1)
         try:
@@ -165,26 +160,23 @@ if __name__ == '__main__':
                                                                                                           '/')  # data_parse 目录
                     csv1_done = False
                     csv2_done = False
-                    csv3_done = False
                     csv4_done = False
-                    pdf_done = False
+                    csv5_done = False
                     for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full): # 判断 data_parse 目录下是否有解析后的文件
                         if '/trajectory_pji.csv' in str(obj2.key):
                             csv1_done = True
                         if '/ego_pji.csv' in str(obj2.key):
                             csv2_done = True
-                        if '/pos_pji.csv' in str(obj2.key):
-                            csv3_done = True
                         if '/objects_pji.csv' in str(obj2.key):
                             csv4_done = True
-                        if '/report.pdf' in str(obj2.key):
-                            pdf_done = True
-                    if csv1_done and csv2_done and csv3_done and csv4_done and pdf_done:
+                        if '/drive.csv' in str(obj2.key):
+                            csv5_done = True
+                    if csv1_done and csv2_done and csv4_done and csv5_done:
                         continue
-                    error_bag_list = parse_json_to_string_array(error_bag_json)
+                    error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
                     if parse_prefix_full in error_bag_list:
                         continue
-                    logging.info("开始生成场景还原csv: %s" % str(obj1.key))
+                    logging.info("------- 生成场景还原csv - 开始: %s -------" % str(obj1.key))
                     local_merged_bag_path = path3 + merged_bag_object_key
                     local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
                     local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
@@ -194,22 +186,18 @@ if __name__ == '__main__':
                         os.makedirs(local_parse_dir)
                     merged_bag_full_name = merged_bag_object_key_split[-1]
                     merged_bag_name = merged_bag_full_name.split('.')[0]
-                    try:
-                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
-                    except Exception as e:
-                        logging.exception("下载合并后的bag包失败: %s" % str(e))
-
+                    bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
                     local_delete_list.append(local_merged_bag_path)
                     # 2 生成 pos_orig.csv 和 pos_hmi.csv
                     parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
-
+                    logging.info("------- 生成场景还原csv - 结束: %s -------" % str(obj1.key))
             # 删除本地临时文件
             if len(local_delete_list) > 0:
                 for local_delete in local_delete_list:
                     try:
                         os.remove(local_delete)
                     except Exception as e:
-                        logging.exception("捕获到一个异常: %s" % str(e))
+                        pass
         except Exception as e:
             logging.exception("全局错误处理: %s" % str(e))
         time.sleep(sleep_time)

+ 12 - 77
src/python2/pjibot_delivery/pcd-pjibot_delivery.py

@@ -1,77 +1,24 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
 import sys
 reload(sys)
 sys.setdefaultencoding("utf-8")
 import os
 import time
 import oss2
-import json
-
-from resource import pcdtovideo_monitor_overlook
-
 import logging
-
+from resource import pcdtovideo_monitor_overlook
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
-logging.basicConfig(filename=path1 + 'log/pcd-pjibot_delivery.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/pcd-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 sleep_time = 10  # 每多少秒扫描一次
 error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/pcd-errorBag.json"
-
-
-def parse_json_to_string_array(file_path):
-    try:
-        # 打开并读取JSON文件(Python 2中不支持encoding参数,需要使用codecs模块或处理文件读取后的编码)
-        with open(file_path, 'r') as file:
-            # 读取文件内容
-            file_content = file.read()
-            # 解析JSON内容(Python 2中json.loads用于解析字符串)
-            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
-
-        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
-        if isinstance(data, list):
-            for item in data:
-                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
-                    raise ValueError("JSON数组中的元素不是字符串")
-            return data
-        else:
-            return []
-    except Exception as e:
-        return []
-
-
-def list_to_json_file(data, file_path):
-    """
-    将列表转换为JSON格式并写入指定的文件路径。
-    如果文件已存在,则覆盖它。
-
-    参数:
-    data (list): 要转换为JSON的列表。
-    file_path (str): 要写入JSON数据的文件路径。
-    """
-        # 确保所有字符串都是 Unicode 类型
-    def ensure_unicode(obj):
-        if isinstance(obj, dict):
-            return {ensure_unicode(k): ensure_unicode(v) for k, v in obj.items()}
-        elif isinstance(obj, list):
-            return [ensure_unicode(i) for i in obj]
-        elif isinstance(obj, str):  # Python 2 的 `str`
-            return obj.decode('utf-8')  # 解码为 Unicode
-        else:
-            return obj
-
-    # 转换数据
-    data = ensure_unicode(data)
-    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
-    json_data = json.dumps(data, ensure_ascii=False, indent=4)
-    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
-
-    # 以二进制写入模式打开文件
-    with open(file_path, 'wb') as file:  # 使用 'wb' 模式
-        # 将UTF-8编码的JSON字符串写入文件
-        file.write(json_data_utf8)
-
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
+# ------- 全局配置 -------
 
 def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local_delete_list):
     global bucket
@@ -85,25 +32,13 @@ def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local
         local_delete_list.append(local_mp4_path)
     except Exception as e:
         local_delete_list.remove(merged_bag_file_path)
-        error_bag_list = parse_json_to_string_array(error_bag_json)
+        error_bag_list = json_utils.jsonparse_json_to_string_array(error_bag_json)
         error_bag_list.append(parse_prefix2)
-        list_to_json_file(error_bag_list, error_bag_json)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成点云视频报错: %s" % str(e))
 
 
-'''
-cname:http://open-bucket.oss.icvdc.com
-内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
-oss桶名: open-bucket
-keyid:n8glvFGS25MrLY7j
-secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
-'''
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         logging.info("开始新一轮扫描")
         try:
@@ -126,7 +61,7 @@ if __name__ == '__main__':
                                 pcd_done = True
                         if pcd_done:
                             continue
-                        error_bag_list = parse_json_to_string_array(error_bag_json)
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
                         if str(parse_prefix_full) in error_bag_list:
                             continue
                         logging.info("%s 需要生成点云视频 pcd_overlook.mp4" % str(parse_prefix_full))

+ 34 - 26
src/python2/pjibot_delivery/resource/bagtocsv_robot.py

@@ -56,7 +56,11 @@ def parsehancheng(input_dir, output_dir):
         drive_file = open(output_dir + "/"+"drive.csv", 'w')
         writer_drive = csv.writer(drive_file)
         writer_drive.writerow(dic_drive)
-        
+
+        dic_targetposition=['Time', 'x', 'y', 'z', 'h']
+        targetposition_file = open(output_dir + "/"+"targetposition.csv", 'w')
+        writer_targetposition = csv.writer(targetposition_file)
+        writer_targetposition.writerow(dic_targetposition)        
     
         #dic_robot_pos = ['Time','simtime','FrameID','HeadingAngle','X', 'Y' ,'Z','latitude','longitude']
         #robot_pos_file = open(output_dir + "/"+"pos_pji.csv", 'w')
@@ -110,6 +114,7 @@ def parsehancheng(input_dir, output_dir):
             robot_pose_lost_flag=True
             final_trajectorye_lost_flag=True
             gnss_lost_flag=True
+            targetposition_lost_flag=True
             
             pcd_exist_flag=False
             
@@ -117,14 +122,32 @@ def parsehancheng(input_dir, output_dir):
             #用来判断机器人点云/图像/规划/定位是否丢帧↑↑↑
             
             for topic,msg,t in bag.read_messages(topics=['/wheel','/obstacle_detection','/wheel_odom','/cmd_vel','/robot_pose','/tracking/objects','/nav/task_feedback_info',
-                                                         '/robot/final_trajectory','/gnss','/image_raw','/velodyne_points']):   
+                                                         '/robot/final_trajectory','/gnss','/image_raw','/velodyne_points','/robot/targetposition']):   
                 
                 if first_message_time is None:  
                     first_message_time = t
                     first_message_time = rospy.Time.to_sec(first_message_time)
                     first_message_time = datetime.fromtimestamp(first_message_time)
                 
-                
+                if topic == "/robot/targetposition":
+                    targetposition_lost_flag=False
+                    X=msg.pose.position.x
+
+                    Y=msg.pose.position.y
+
+                    Z=msg.pose.position.z
+                    OX=msg.pose.orientation.x
+                    OY=msg.pose.orientation.y
+                    OZ=msg.pose.orientation.z
+                    OW=msg.pose.orientation.w
+                    H=quaternion_to_euler(OX,OY,OZ,OW)
+                    message_targetposition=[str(t)[:-6],X,Y,Z,H]
+                    writer_targetposition.writerow(message_targetposition)
+                   
+                    
+                    
+                    
+                    
                     
                 if topic == "/velodyne_points":
                     pcd_exist_flag=True
@@ -275,6 +298,8 @@ def parsehancheng(input_dir, output_dir):
             objects_file.close()
             EgoState_file.close()
             trajectory_file.close()
+            targetposition_file.close()
+            drive_file.close()
             
         
         with open(json_path, "w") as file:
@@ -288,7 +313,8 @@ def parsehancheng(input_dir, output_dir):
                 '''
             else:
                 data.append('点云缺失')
-                
+            if targetposition_lost_flag: 
+                data.append('目标点缺失')
             if robot_pose_lost_flag or gnss_lost_flag:
                 data.append('自车数据缺失')
             if final_trajectorye_lost_flag:
@@ -308,33 +334,15 @@ def parsehancheng(input_dir, output_dir):
         
         
 
-# if __name__ == "__main__":
-#    #input_dir='/home/dell/下载/VD100M6-BJ-Perception2024-10-24-15-48-07.bag'
-#    #output_dir='/home/dell/下载'
+def parse(input_dir, output_dir):
+   #input_dir='/media/dell/HIKSEMI1/2024-12-03-10-37-41.bag'
+   #output_dir='/media/dell/HIKSEMI1'
 #    input_dir=sys.argv[1]
 #    output_dir = sys.argv[2]
-#    bagname=input_dir.split('/')[-1].split('.bag')[0]
-
-   
-#    output_dir=os.path.join(output_dir, bagname)
-#    if not os.path.exists(output_dir):
-#        os.makedirs(output_dir)
-#    parsehancheng(input_dir, output_dir)
-
-
-
-
-# if __name__ == '__main__':
-def parse(input_dir, output_dir):
-    # input_dir='/media/dell/HIKSEMI/pji_DGNC/pjioutrobot_2024-08-21-15-12-04.bag'
-    # output_dir='/media/dell/HIKSEMI/pji_DGNC'
-    # input_dir=sys.argv[1]
-    # output_dir = sys.argv[2]
    bagname=input_dir.split('/')[-1].split('.bag')[0]
 
    
    output_dir=os.path.join(output_dir, bagname)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
-   parsehancheng(input_dir, output_dir)
-
+   parsehancheng(input_dir, output_dir)

+ 2 - 3
src/python2/pjibot_delivery/simulation-pjibot_delivery.py

@@ -13,10 +13,9 @@ path2 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 
 vehicle_name = 'PuJin_distribution'  # 配送 PuJin_distribution 巡检 PuJin_patrol_robot
 xoscName = 'scenario.xosc'
-logging.basicConfig(filename=path2 + 'log/simulation.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
+logging.basicConfig(filename=path2 + 'log/simulation-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 
-sleep_time = 30  # 每多少秒扫描一次
+sleep_time = 10  # 每多少秒扫描一次
 
 
 def move_xosc_before_simulation(root_path):

+ 0 - 0
src/python2/pjibot_delivery/utils/__init__.py


+ 44 - 0
src/python2/pjibot_delivery/utils/json_utils.py

@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
+import json
+
+def parse_json_to_string_array(file_path):
+    try:
+        with open(file_path, 'r') as file:
+            file_content = file.read()
+            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
+
+        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
+        if isinstance(data, list):
+            for item in data:
+                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
+                    raise ValueError("JSON数组中的元素不是字符串")
+            return data
+        else:
+            return []
+    except Exception as e:
+        return []
+def list_to_json_file(data, file_path):
+    """
+    将列表转换为JSON格式并写入指定的文件路径。
+    如果文件已存在,则覆盖它。
+
+    参数:
+    data (list): 要转换为JSON的列表。
+    file_path (str): 要写入JSON数据的文件路径。
+    """
+    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
+    json_data = json.dumps(data, ensure_ascii=False, indent=4)
+    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
+
+    # 以写入模式打开文件,如果文件已存在则覆盖
+    with open(file_path, 'w') as file:
+        # 将UTF-8编码的JSON字符串写入文件
+        file.write(json_data_utf8)
+
+def add_error(parse_prefix,error_bag_json):
+    error_bag_list = parse_json_to_string_array(error_bag_json)
+    error_bag_list.append(parse_prefix)
+    list_to_json_file(error_bag_list, error_bag_json)

+ 51 - 95
src/python2/pjibot_delivery/xosc-pjibot_delivery.py

@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
 import sys
 reload(sys)
 sys.setdefaultencoding("utf-8")
@@ -8,115 +9,73 @@ import oss2
 import logging
 import json
 import subprocess
-
+import io
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/'
 path2 = '/mnt/disk001/dcl_data_process/src/python3/pjibot_outdoor/'
-
-logging.basicConfig(filename=path1 + 'log/xosc-pjibot_delivery.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/xosc-pjibot_delivery.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_delivery/'
 sleep_time = 10  # 每多少秒扫描一次
 error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_delivery/xosc-errorBag.json"
-
-
-def parse_json_to_string_array(file_path):
-    try:
-        # 打开并读取JSON文件(Python 2中不支持encoding参数,需要使用codecs模块或处理文件读取后的编码)
-        with open(file_path, 'r') as file:
-            # 读取文件内容
-            file_content = file.read()
-            # 解析JSON内容(Python 2中json.loads用于解析字符串)
-            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
-
-        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
-        if isinstance(data, list):
-            for item in data:
-                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
-                    raise ValueError("JSON数组中的元素不是字符串")
-            return data
-        else:
-            return []
-    except Exception as e:
-        return []
-
-
-def list_to_json_file(data, file_path):
-    """
-    将列表转换为JSON格式并写入指定的文件路径。
-    如果文件已存在,则覆盖它。
-
-    参数:
-    data (list): 要转换为JSON的列表。
-    file_path (str): 要写入JSON数据的文件路径。
-    """
-        # 确保所有字符串都是 Unicode 类型
-    def ensure_unicode(obj):
-        if isinstance(obj, dict):
-            return {ensure_unicode(k): ensure_unicode(v) for k, v in obj.items()}
-        elif isinstance(obj, list):
-            return [ensure_unicode(i) for i in obj]
-        elif isinstance(obj, str):  # Python 2 的 `str`
-            return obj.decode('utf-8')  # 解码为 Unicode
-        else:
-            return obj
-
-    # 转换数据
-    data = ensure_unicode(data)
-    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
-    json_data = json.dumps(data, ensure_ascii=False, indent=4)
-    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
-
-    # 以二进制写入模式打开文件
-    with open(file_path, 'wb') as file:  # 使用 'wb' 模式
-        # 将UTF-8编码的JSON字符串写入文件
-        file.write(json_data_utf8)
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
+# ------- 全局配置 -------
 
 def generate_xosc(parse_prefix, local_parse_dir, local_delete_list):
     try:
-        # 进入指定目录
         os.chdir(path2)
-
-        # 构造命令
         command2 = 'python3 jiqiren_outdoor.py {} 0'.format(local_parse_dir[:-1])  # 配送机器人0 巡检机器人1
         logging.info("进入目录 %s 调用命令2: %s", path2, command2)
-
-        # 使用 subprocess.Popen 运行命令并捕获输出
-        try:
-            process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            stdout, stderr = process.communicate()  # 等待进程执行完成并获取输出
-            if process.returncode == 0:
-                logging.info("命令输出: %s", stdout.decode("utf-8"))
-            else:
-                logging.error("命令执行失败,错误码: %d", process.returncode)
-                logging.error("命令错误输出: %s", stderr.decode("utf-8"))
-        except Exception as e:
-            logging.error("命令执行过程中发生异常: %s", e)
+        process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()  # 等待进程执行完成并获取输出
+        if process.returncode == 0:
+            logging.info("命令输出: %s", stdout.decode("utf-8"))
+        else:
+            logging.error("命令执行失败,错误码: %d", process.returncode)
+            logging.error("命令错误输出: %s", stderr.decode("utf-8"))
         local_xosc_path2 = local_parse_dir + 'simulation/xosc/openx_outdoor0.xosc'
         bucket.put_object_from_file(parse_prefix + 'scenario.xosc', local_xosc_path2)
         bucket.put_object_from_file(parse_prefix + 'scenario_hmi.xosc', local_xosc_path2)
         logging.info("上传 scenario_hmi.xosc 成功: %s" % str(parse_prefix + 'scenario.xosc'))
+        
+        # ------- 处理 output.json - 开始 -------
+        output_json_path = local_parse_dir + 'simulation/xosc/output.json'
+        callback_json_oss_key = parse_prefix+'callback.json'
+        callback_json_local = local_parse_dir+'/callback.json'
+        try:
+            # 1 解析 output.json
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            # todo 可能没有callback.json,已经处理成 callback_done.json了,暂时不管
+            logging.exception("处理 output.json报错: %s" % str(e3))
+            pass
+        # ------- 处理 output.json - 结束 -------
+        
+        
+        # 处理删除
         local_delete_list.append(local_xosc_path2)
+        local_delete_list.append(callback_json_local)
     except Exception as e:
-        error_bag_list = parse_json_to_string_array(error_bag_json)
+        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
         error_bag_list.append(parse_prefix)
-        list_to_json_file(error_bag_list, error_bag_json)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成xosc报错: %s" % str(e))
 
-
-'''
-cname:http://open-bucket.oss.icvdc.com
-内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
-oss桶名: open-bucket
-keyid:n8glvFGS25MrLY7j
-secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
-'''
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         try:
             logging.info("开始新一轮扫描")
@@ -141,26 +100,23 @@ if __name__ == '__main__':
                                 xosc_done = True
                             if '/objects_pji.csv' in str(obj3.key):
                                 csv1_done = True
-                            if '/pos_pji.csv' in str(obj3.key):
+                            if '/ego_pji.csv' in str(obj3.key):
                                 csv2_done = True
                         if xosc_done:
                             continue
                         if not csv1_done:
-                            logging.info("不存在 /objects_pji.csv: %s" % str(parse_prefix_full))
                             continue
                         if not csv2_done:
-                            logging.info("不存在 /pos_pji.csv: %s" % str(parse_prefix_full))
                             continue
-                        error_bag_list = parse_json_to_string_array(error_bag_json)
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
                         if str(parse_prefix_full) in error_bag_list:
                             continue
                         logging.info("需要生成 scenario_hmi.xosc: %s" % str(parse_prefix_full))
                         local_dir_full = path1 + parse_prefix_full
                         if not os.path.exists(local_dir_full):
                             os.makedirs(local_dir_full)
-                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv',
-                                                  local_dir_full + 'objects_pji.csv')
-                        bucket.get_object_to_file(parse_prefix_full + 'pos_pji.csv', local_dir_full + 'pos_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv',local_dir_full + 'objects_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'ego_pji.csv', local_dir_full + 'ego_pji.csv')
                         generate_xosc(parse_prefix_full, local_dir_full, local_delete_list)
                 except Exception as e:
                     logging.exception("局部异常处理: %s", str(e))

+ 21 - 29
src/python2/pjibot_patrol/callback-pjibot_patrol.py

@@ -4,14 +4,9 @@ import time
 import urllib2
 import oss2
 from datetime import datetime, timedelta
-
 import logging
-
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
-
-logging.basicConfig(filename=path1 + 'log/callback-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/callback-pjibot_patrol.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_patrol/'
 key2 = 'data/'
 key3 = 'data_merge/'
@@ -29,13 +24,6 @@ def add_hour(date_string, hour_number):
     return new_date.strftime("%Y-%m-%d-%H-%M-%S")
 
 
-'''
-cname:http://open-bucket.oss.icvdc.com
-内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
-oss桶名: open-bucket
-keyid:n8glvFGS25MrLY7j
-secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
-'''
 if __name__ == '__main__':
     auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
     endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
@@ -62,7 +50,7 @@ if __name__ == '__main__':
                         file8 = False
                         file9 = False
                         for obj2 in oss2.ObjectIterator(bucket, prefix=prefix+ '/'):
-                            if '/callback.json' in str(obj2.key):
+                            if '/callback_done.json' in str(obj2.key):
                                 file1 = True
                             if '/ego_pji.csv' in str(obj2.key):
                                 file2 = True
@@ -70,18 +58,18 @@ if __name__ == '__main__':
                                 file3 = True
                             if '/pcd_overlook.mp4' in str(obj2.key):
                                 file4 = True
-                            if '/pos_pji.csv' in str(obj2.key):
+                            if '/drive.csv' in str(obj2.key):
                                 file5 = True
                             if '/scenario_orig.mp4' in str(obj2.key):
                                 file7 = True
-                            if '/simulation.xosc' in str(obj2.key) or '/scenario.xosc' in str(obj2.key):
+                            if '/scenario_hmi.xosc' in str(obj2.key):
                                 file8 = True
                             if '/trajectory_pji.csv' in str(obj2.key):
                                 file9 = True
-                        if not file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
+                        if file1 or not file2 or not file3 or not file4 or not file5 or not file7 or not file8 or not file9:
                             continue
                         time.sleep(1)
-                        logging.info("发送:%s", str(obj1.key))
+                        logging.info("发送:%s", prefix)
                         # 1 获取json内容
                         json_content = bucket.get_object(str(obj1.key)).read()
                         # 2 获取token
@@ -110,6 +98,11 @@ if __name__ == '__main__':
                             old_ros_bag_path = json_object['rosBagPath']
                             task_id = json_object['taskId']
                             trigger_id = json_object['triggerId']
+                            check = json_object['check']
+                            check_order = ['自车数据缺失', '不在道路范围', '无规划路径', '目标点缺失','点云缺失', '点云丢帧', '解析程序错误', '还原程序错误', '评价程序错误']
+                            check_order_dict = dict((item, idx) for idx, item in enumerate(check_order))
+                            check = sorted(check, key=lambda x: check_order_dict.get(x, float('inf')))
+                            check = ','.join(check) # 数组元素拼接成字符串序列
                         except Exception as e:
                             logging.exception("callback报错:%s", str(e))
                             continue
@@ -119,14 +112,11 @@ if __name__ == '__main__':
                         # new_date = add_hour(old_date, 8)
                         new_date = old_date
                         old_delete_list = []
+                        callback_done_oss_key = ''
                         for obj_old in oss2.ObjectIterator(bucket, prefix=old_file_path):
                             old_delete_list.append(str(obj_old.key))
                             if 'callback.json' in str(obj_old.key):
-                                bucket.copy_object(bucket_name, str(obj_old.key),
-                                                   str(obj_old.key).replace(old_date, new_date).replace(
-                                                       'callback.json',
-                                                       'callback_done.json'))
-                                bucket.delete_object(str(obj_old.key))  # 删除 callback.json
+                                callback_done_oss_key = str(obj_old.key).replace(old_date, new_date).replace('callback.json','callback_done.json')
                         # todo 时区不变也就不需要移动文件了
                         #     else:
                         #         bucket.copy_object(bucket_name, str(obj_old.key),
@@ -145,7 +135,8 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
                         else:
                             logging.info("json_object 不包含 'userId' 字段")
@@ -156,19 +147,20 @@ if __name__ == '__main__':
                                 "filePath": old_file_path.replace(old_date, new_date),
                                 "rosBagPath": old_ros_bag_path.replace(old_date, new_date),
                                 "taskId": task_id,
-                                "triggerId": trigger_id
+                                "triggerId": trigger_id,
+                                "check":check
                             }
                         json_data2 = json.dumps(data2)
+                        bucket.put_object(callback_done_oss_key, json_data2)
                         logging.info("回调接口请求中:%s" % url2_private)
-                        request2 = urllib2.Request(url2_private, json_data2,
-                                                   headers={'Content-Type': 'application/json',
-                                                            'authorization': access_token})
+                        logging.info("回调接口发送参数为: %s" % str(data2))
+                        request2 = urllib2.Request(url2_private, json_data2,headers={'Content-Type': 'application/json','authorization': access_token})
                         response2 = urllib2.urlopen(request2)
                         result_json2 = response2.read()
                         result_object2 = json.loads(result_json2)
                         logging.info("回调接口请求结果为: %s", result_object2)
                 except Exception as e:
                     logging.exception("局部异常处理: %s" % str(e))
-            time.sleep(2)
+            time.sleep(10)
         except Exception as e:
             logging.exception("全局错误处理: %s" % str(e))

+ 0 - 8
src/python2/pjibot_patrol/camera-nohup.sh

@@ -1,8 +0,0 @@
-#!/bin/bash
-if [ ! -d "./log" ]; then
-    mkdir "./log"
-    echo "Directory './log' created."
-else
-    echo "Directory './log' already exists."
-fi
-nohup python2 camera-pjibot_patrol.py > log/camera.out 2>&1 &

+ 0 - 94
src/python2/pjibot_patrol/camera-pjibot_patrol.py

@@ -1,94 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-import time
-import oss2
-
-from resource import parse_robot_image
-
-import logging
-
-path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
-
-logging.basicConfig(filename=path1 + 'log/camera-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
-key1 = 'pjibot_patrol/'
-sleep_time = 30  # 每多少秒扫描一次
-
-
-def parse_to_mp4(merged_bag_file_path, parse_prefix1, local_parse_dir2, local_delete_list3):
-    try:
-        flag, local_mp4_dir = parse_robot_image.parse(merged_bag_file_path, local_parse_dir2 + '/camera/')
-        if flag:  # 没有话题就不生成视频了
-            mp4_file_name = 'camera'
-            local_mp4_file_path1 = local_mp4_dir + '/' + mp4_file_name + '.mp4'
-            local_delete_list3.append(local_mp4_file_path1)
-            oss_csv_object_key1 = parse_prefix1 + mp4_file_name + '.mp4'
-            bucket.put_object_from_file(oss_csv_object_key1, local_mp4_file_path1)
-            logging.info("上传 camera.mp4 成功: %s", str(oss_csv_object_key1))
-            local_delete_list.append(local_merged_bag_path)
-        else:
-            logging.info("没有图像话题: %s", merged_bag_file_path)
-    except Exception as e2:
-        logging.exception("生成摄像头视频报错: %s", e2)
-
-
-# ------- 获取合并之后的bag包,解析出csv -------
-if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
-
-    while True:
-        logging.info("开始新一轮扫描")
-        try:
-            local_delete_list = []
-            oss_delete_list = []
-            prefix_list = []
-            # 2 获取已经上传完成的所有目录并分组
-            for obj1 in oss2.ObjectIterator(bucket, prefix=key1):
-                # 获取合并后的包
-                merged_bag_object_key = str(obj1.key)
-                # print(f'判断1{merged_bag_object_key}')
-                if 'data_merge' in str(obj1.key) and str(obj1.key).endswith('.bag'):
-                    merged_bag_object_key_split = merged_bag_object_key.split('/')
-                    merged_prefix = '/'.join(merged_bag_object_key_split[:-1])
-                    parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
-                    parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'
-                    camera_done = False
-                    for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
-                        if '/camera.mp4' in str(obj2.key):
-                            camera_done = True
-                    if camera_done:
-                        continue
-
-                    local_merged_bag_path = path1 + 'camera/' + merged_bag_object_key
-                    local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
-                    local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
-                    if not os.path.exists(local_merged_dir):
-                        os.makedirs(local_merged_dir)
-                    if not os.path.exists(local_parse_dir):
-                        os.makedirs(local_parse_dir)
-                    merged_bag_full_name = merged_bag_object_key_split[-1]
-                    merged_bag_name = merged_bag_full_name.split('.')[0]
-                    try:
-                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
-                    except Exception as e:
-                        logging.exception("下载合并后的bag包失败: %s" % str(e))
-
-                    # 2 生成 pos_orig.csv 和 pos_hmi.csv
-                    logging.info("生成视频: %s" % str(obj1.key))
-                    parse_to_mp4(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
-
-            # 删除本地临时文件
-            if len(local_delete_list) > 0:
-                for local_delete in local_delete_list:
-                    try:
-                        os.remove(local_delete)
-                    except Exception as e:
-                        pass
-                        # logging.exception("捕获到一个异常: %s" % str(e))
-            time.sleep(sleep_time)
-        except Exception as e:
-            logging.exception("全局错误处理: %s" % str(e))

+ 98 - 63
src/python2/pjibot_patrol/csv-pjibot_patrol.py

@@ -1,107 +1,147 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
 import os
-import shutil
 import subprocess
 import time
 import oss2
-
-from resource import bagtocsv_robot
-
+import json
+import io
 import logging
-
+from resource import bagtocsv_robot
+from utils import json_utils
+#  创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
 path2 = '/mnt/disk001/pdf_outdoor/run/'
 path3 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/csv/'
-logging.basicConfig(filename=path1 + 'log/csv-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/csv-pjibot_patrol.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_patrol/'
 sleep_time = 30  # 每多少秒扫描一次
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/csv-errorBag.json"
+# ------- 全局配置 -------
 
 
 def parse_csv(data_bag, parse_prefix, local_parse_dir, local_delete_list):
     try:
         bagtocsv_robot.parse(data_bag, local_parse_dir + '/csv/')
         bagname = data_bag.split('/')[-1].split('.')[0]
-        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件的目录
+        local_csv_dir = os.path.join(local_parse_dir + '/csv/', bagname)  # 最终生成四个csv文件和output.json的目录
 
+        # ------- 处理 output.json - 开始 -------
+        outputs = []  # default: the PDF guard below iterates outputs even when output.json is absent
+        try:
+            output_json_path = str(local_csv_dir)+'/output.json'
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                callback_json_oss_key = parse_prefix+'callback.json'
+                callback_json_local = local_csv_dir+'/callback.json'
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                data['check'] = list(set(data['check'])) # 去重
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            pass
+        # ------- 处理 output.json - 结束 -------
         csv_file_name1 = 'trajectory_pji'
         local_csv_file_path1 = str(local_csv_dir) + '/' + str(csv_file_name1) + '.csv'
         oss_csv_object_key1 = parse_prefix + csv_file_name1 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        if os.path.exists(local_csv_file_path1):
+            bucket.put_object_from_file(oss_csv_object_key1, local_csv_file_path1)
+        else:
+            logging.error("没有 trajectory_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name2 = 'ego_pji'
         local_csv_file_path2 = str(local_csv_dir) + '/' + str(csv_file_name2) + '.csv'
         oss_csv_object_key2 = parse_prefix + csv_file_name2 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        if os.path.exists(local_csv_file_path2):
+            bucket.put_object_from_file(oss_csv_object_key2, local_csv_file_path2)
+        else:
+            logging.error("没有 ego_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
-        csv_file_name3 = 'pos_pji'
+        csv_file_name3 = 'targetposition'
         local_csv_file_path3 = str(local_csv_dir) + '/' + str(csv_file_name3) + '.csv'
         oss_csv_object_key3 = parse_prefix + csv_file_name3 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        if os.path.exists(local_csv_file_path3):
+            bucket.put_object_from_file(oss_csv_object_key3, local_csv_file_path3)
+        else:
+            logging.error("没有 targetposition.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
 
         csv_file_name4 = 'objects_pji'
         local_csv_file_path4 = str(local_csv_dir) + '/' + str(csv_file_name4) + '.csv'
         oss_csv_object_key4 = parse_prefix + csv_file_name4 + '.csv'
-        bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
-
-        # 生成pdf
-        try:
+        if os.path.exists(local_csv_file_path4):
+            bucket.put_object_from_file(oss_csv_object_key4, local_csv_file_path4)
+        else:
+            logging.error("没有 objects_pji.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+        
+        csv_file_name5 = 'drive'
+        local_csv_file_path5 = str(local_csv_dir) + '/' + str(csv_file_name5) + '.csv'
+        oss_csv_object_key5 = parse_prefix + csv_file_name5 + '.csv'
+        if os.path.exists(local_csv_file_path5):
+            bucket.put_object_from_file(oss_csv_object_key5, local_csv_file_path5)
+        else:
+            logging.error("没有 drive.csv")
+            json_utils.add_error(parse_prefix,error_bag_json)
+
+        # ------- 生成pdf - 开始 -------
+        pdf_local_path = str(local_csv_dir) + '/report.pdf'
+        can_pdf = True
+        for output in outputs:
+            if str(output) in ['自车数据缺失','无规划路径']:
+                logging.error("【自车数据缺失、无规划路径】导致无法生成评价报告PDF")
+                can_pdf = False
+        if can_pdf:
             os.chdir(path2)
-            # 构造命令
             command1 = ['./pji_outdoor_real',
                         os.path.join(local_csv_dir, ''),  # 注意:这里可能不需要末尾的 '/',取决于程序要求
                         os.path.join(local_csv_dir, ''),  # 同上
                         os.path.join(local_csv_dir, 'trajectory.png'),
                         bagname]
-
-            # 记录调用命令的信息
             logging.info("调用生成pdf 报告命令: %s" % ' '.join(command1))
-
-            # 使用 subprocess.Popen 执行命令
             process = subprocess.Popen(command1, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             stdout, stderr = process.communicate()  # 等待命令执行完成
-
-            # 检查是否有输出或错误
             if stdout:
                 logging.info("命令的标准输出:")
                 logging.info(stdout.decode('utf-8'))  # Python 2 中需要解码
             if stderr:
                 logging.error("命令的错误输出:")
                 logging.error(stderr.decode('utf-8'))  # Python 2 中需要解码
-
-            # 检查命令是否成功执行
-            if process.returncode == 0:
-                logging.info("命令执行成功")
-            else:
+            if process.returncode != 0:
                 logging.error("命令执行失败,退出码: %s" % process.returncode)
-
-        except OSError as e:
-            # 如果更改目录失败或命令不存在等
-            logging.error("在执行过程中发生错误: %s" % e)
-
-        oss_csv_object_key5 = parse_prefix + 'report.pdf'
-        bucket.put_object_from_file(oss_csv_object_key5, str(local_csv_dir) + '/report.pdf')
-        logging.info("pdf 报告生成并上传完成。")
-
+            oss_csv_object_key5 = parse_prefix + 'report.pdf'
+            bucket.put_object_from_file(oss_csv_object_key5, pdf_local_path)
+            logging.info("pdf 报告生成并上传完成。")
+        # ------- 生成pdf - 结束 -------
+        
         # 记得删除
         local_delete_list.append(local_csv_file_path1)
         local_delete_list.append(local_csv_file_path2)
-        local_delete_list.append(local_csv_file_path3)
         local_delete_list.append(local_csv_file_path4)
-        local_delete_list.append(str(local_csv_dir) + '/report.pdf')
+        local_delete_list.append(output_json_path)
+        local_delete_list.append(pdf_local_path)
+        local_delete_list.append(str(local_csv_dir) + '/trajectory.png')
 
     except Exception as e2:
-        # 当出现异常时执行的代码
         logging.exception("生成csv报错: %s", e2)
+        json_utils.add_error(parse_prefix,error_bag_json)
 
-
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         logging.info("开始新一轮扫描:%s " % key1)
         try:
@@ -120,24 +160,23 @@ if __name__ == '__main__':
                                                                                                           '/')  # data_parse 目录
                     csv1_done = False
                     csv2_done = False
-                    csv3_done = False
                     csv4_done = False
-                    pdf_done = False
+                    csv5_done = False
                     for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full): # 判断 data_parse 目录下是否有解析后的文件
                         if '/trajectory_pji.csv' in str(obj2.key):
                             csv1_done = True
                         if '/ego_pji.csv' in str(obj2.key):
                             csv2_done = True
-                        if '/pos_pji.csv' in str(obj2.key):
-                            csv3_done = True
                         if '/objects_pji.csv' in str(obj2.key):
                             csv4_done = True
-                        if '/report.pdf' in str(obj2.key):
-                            pdf_done = True
-                    if csv1_done and csv2_done and csv3_done and csv4_done and pdf_done:
+                        if '/drive.csv' in str(obj2.key):
+                            csv5_done = True
+                    if csv1_done and csv2_done and csv4_done and csv5_done:
                         continue
-
-                    logging.info("开始生成场景还原csv: %s" % str(obj1.key))
+                    error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                    if parse_prefix_full in error_bag_list:
+                        continue
+                    logging.info("------- 生成场景还原csv - 开始: %s -------" % str(obj1.key))
                     local_merged_bag_path = path3 + merged_bag_object_key
                     local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
                     local_parse_dir = local_merged_dir.replace('data_merge', 'data_parse')
@@ -147,22 +186,18 @@ if __name__ == '__main__':
                         os.makedirs(local_parse_dir)
                     merged_bag_full_name = merged_bag_object_key_split[-1]
                     merged_bag_name = merged_bag_full_name.split('.')[0]
-                    try:
-                        bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
-                    except Exception as e:
-                        logging.exception("下载合并后的bag包失败: %s" % str(e))
-
+                    bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
                     local_delete_list.append(local_merged_bag_path)
                     # 2 生成 pos_orig.csv 和 pos_hmi.csv
                     parse_csv(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)
-
+                    logging.info("------- 生成场景还原csv - 结束: %s -------" % str(obj1.key))
             # 删除本地临时文件
             if len(local_delete_list) > 0:
                 for local_delete in local_delete_list:
                     try:
                         os.remove(local_delete)
                     except Exception as e:
-                        logging.exception("捕获到一个异常: %s" % str(e))
+                        pass
         except Exception as e:
             logging.exception("全局错误处理: %s" % str(e))
         time.sleep(sleep_time)

+ 21 - 26
src/python2/pjibot_patrol/pcd-pjibot_patrol.py

@@ -1,20 +1,24 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import os
 import time
 import oss2
-
-from resource import pcdtovideo_monitor_overlook
-
 import logging
-
+from resource import pcdtovideo_monitor_overlook
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
-logging.basicConfig(filename=path1 + 'log/pcd-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/pcd-pjibot_patrol.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_patrol/'
-path1 = '/root/'
 sleep_time = 30  # 每多少秒扫描一次
-
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/pcd-errorBag.json"
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
+# ------- 全局配置 -------
 
 def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local_delete_list):
     global bucket
@@ -27,22 +31,14 @@ def parse_to_pcd_mp4(merged_bag_file_path, parse_prefix2, local_parse_dir, local
         logging.info("上传点云视频到: %s", oss_csv_object_key3)
         local_delete_list.append(local_mp4_path)
     except Exception as e:
+        local_delete_list.remove(merged_bag_file_path)
+        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+        error_bag_list.append(parse_prefix2)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成点云视频报错: %s" % str(e))
 
 
-'''
-cname:http://open-bucket.oss.icvdc.com
-内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
-oss桶名: open-bucket
-keyid:n8glvFGS25MrLY7j
-secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
-'''
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         logging.info("开始新一轮扫描")
         try:
@@ -60,16 +56,14 @@ if __name__ == '__main__':
                         parse_prefix = merged_prefix.replace('data_merge', 'data_parse')
                         parse_prefix_full = merged_bag_object_key.replace('data_merge', 'data_parse')[:-4] + '/'
                         pcd_done = False
-                        callback_done = False
                         for obj2 in oss2.ObjectIterator(bucket, prefix=parse_prefix_full):
-                            if '/callback.json' in str(obj2.key):
-                                callback_done = True
                             if '/pcd_overlook.mp4' in str(obj2.key):
                                 pcd_done = True
-                        if not callback_done:
-                            continue
                         if pcd_done:
                             continue
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                        if str(parse_prefix_full) in error_bag_list:
+                            continue
                         logging.info("%s 需要生成点云视频 pcd_overlook.mp4" % str(parse_prefix_full))
                         local_merged_bag_path = path1 + 'pcd/' + merged_bag_object_key
                         local_merged_dir = '/'.join(local_merged_bag_path.split('/')[:-1])
@@ -81,6 +75,7 @@ if __name__ == '__main__':
                         merged_bag_full_name = merged_bag_object_key_split[-1]
                         merged_bag_name = merged_bag_full_name.split('.')[0]
                         bucket.get_object_to_file(merged_bag_object_key, local_merged_bag_path)
+                        logging.info("本地 bag 包路径为【%s】" % str(local_merged_bag_path))
                         local_delete_list.append(local_merged_bag_path)
                         # 2 生成 pos_orig.csv 和 pos_hmi.csv
                         parse_to_pcd_mp4(local_merged_bag_path, parse_prefix_full, local_parse_dir, local_delete_list)

+ 34 - 26
src/python2/pjibot_patrol/resource/bagtocsv_robot.py

@@ -56,7 +56,11 @@ def parsehancheng(input_dir, output_dir):
         drive_file = open(output_dir + "/"+"drive.csv", 'w')
         writer_drive = csv.writer(drive_file)
         writer_drive.writerow(dic_drive)
-        
+
+        dic_targetposition=['Time', 'x', 'y', 'z', 'h']
+        targetposition_file = open(output_dir + "/"+"targetposition.csv", 'w')
+        writer_targetposition = csv.writer(targetposition_file)
+        writer_targetposition.writerow(dic_targetposition)        
     
         #dic_robot_pos = ['Time','simtime','FrameID','HeadingAngle','X', 'Y' ,'Z','latitude','longitude']
         #robot_pos_file = open(output_dir + "/"+"pos_pji.csv", 'w')
@@ -110,6 +114,7 @@ def parsehancheng(input_dir, output_dir):
             robot_pose_lost_flag=True
             final_trajectorye_lost_flag=True
             gnss_lost_flag=True
+            targetposition_lost_flag=True
             
             pcd_exist_flag=False
             
@@ -117,14 +122,32 @@ def parsehancheng(input_dir, output_dir):
             #用来判断机器人点云/图像/规划/定位是否丢帧↑↑↑
             
             for topic,msg,t in bag.read_messages(topics=['/wheel','/obstacle_detection','/wheel_odom','/cmd_vel','/robot_pose','/tracking/objects','/nav/task_feedback_info',
-                                                         '/robot/final_trajectory','/gnss','/image_raw','/velodyne_points']):   
+                                                         '/robot/final_trajectory','/gnss','/image_raw','/velodyne_points','/robot/targetposition']):   
                 
                 if first_message_time is None:  
                     first_message_time = t
                     first_message_time = rospy.Time.to_sec(first_message_time)
                     first_message_time = datetime.fromtimestamp(first_message_time)
                 
-                
+                if topic == "/robot/targetposition":
+                    targetposition_lost_flag=False
+                    X=msg.pose.position.x
+
+                    Y=msg.pose.position.y
+
+                    Z=msg.pose.position.z
+                    OX=msg.pose.orientation.x
+                    OY=msg.pose.orientation.y
+                    OZ=msg.pose.orientation.z
+                    OW=msg.pose.orientation.w
+                    H=quaternion_to_euler(OX,OY,OZ,OW)
+                    message_targetposition=[str(t)[:-6],X,Y,Z,H]
+                    writer_targetposition.writerow(message_targetposition)
+                   
+                    
+                    
+                    
+                    
                     
                 if topic == "/velodyne_points":
                     pcd_exist_flag=True
@@ -275,6 +298,8 @@ def parsehancheng(input_dir, output_dir):
             objects_file.close()
             EgoState_file.close()
             trajectory_file.close()
+            targetposition_file.close()
+            drive_file.close()
             
         
         with open(json_path, "w") as file:
@@ -288,7 +313,8 @@ def parsehancheng(input_dir, output_dir):
                 '''
             else:
                 data.append('点云缺失')
-                
+            if targetposition_lost_flag: 
+                data.append('目标点缺失')
             if robot_pose_lost_flag or gnss_lost_flag:
                 data.append('自车数据缺失')
             if final_trajectorye_lost_flag:
@@ -308,33 +334,15 @@ def parsehancheng(input_dir, output_dir):
         
         
 
-# if __name__ == "__main__":
-#    #input_dir='/home/dell/下载/VD100M6-BJ-Perception2024-10-24-15-48-07.bag'
-#    #output_dir='/home/dell/下载'
+def parse(input_dir, output_dir):
+   #input_dir='/media/dell/HIKSEMI1/2024-12-03-10-37-41.bag'
+   #output_dir='/media/dell/HIKSEMI1'
 #    input_dir=sys.argv[1]
 #    output_dir = sys.argv[2]
-#    bagname=input_dir.split('/')[-1].split('.bag')[0]
-
-   
-#    output_dir=os.path.join(output_dir, bagname)
-#    if not os.path.exists(output_dir):
-#        os.makedirs(output_dir)
-#    parsehancheng(input_dir, output_dir)
-
-
-
-
-# if __name__ == '__main__':
-def parse(input_dir, output_dir):
-    # input_dir='/media/dell/HIKSEMI/pji_DGNC/pjioutrobot_2024-08-21-15-12-04.bag'
-    # output_dir='/media/dell/HIKSEMI/pji_DGNC'
-    # input_dir=sys.argv[1]
-    # output_dir = sys.argv[2]
    bagname=input_dir.split('/')[-1].split('.bag')[0]
 
    
    output_dir=os.path.join(output_dir, bagname)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
-   parsehancheng(input_dir, output_dir)
-
+   parsehancheng(input_dir, output_dir)

+ 4 - 8
src/python2/pjibot_patrol/simulation-pjibot_patrol.py

@@ -6,18 +6,13 @@ import xml.etree.ElementTree as ET
 import shutil
 import docker
 import logging
-
 key1 = 'pjibot_patrol/'
 path1 = '/scenarios4/'
 path2 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
 path3 = '/mnt/disk001/simulation_outdoor/'
 vehicle_name = 'PuJin_patrol_robot'  # 配送 PuJin_distribution 巡检 PuJin_patrol_robot
 xoscName = 'scenario.xosc'
-
-
-logging.basicConfig(filename=path2 + 'log/simulation-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path2 + 'log/simulation-pjibot_patrol.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 sleep_time = 60  # 每多少秒扫描一次
 
 
@@ -55,6 +50,7 @@ def move_xosc_before_simulation(root_path):
             print("文件已删除:", xosc_src)
         else:
             print("文件不存在:", xosc_src)
+
     except Exception as e:
         logging.exception("修改xosc报错: %s" % str(e))
 
@@ -72,7 +68,7 @@ def upload_simulation(parse_prefix, mp41):
 
 def simulation(parse_prefix, mp41):
     try:
-        os.system("docker start vtd4")
+        os.system("docker start vtd1")
         # 实例化Docker客户端
         client = docker.from_env()
         while True:
@@ -82,7 +78,7 @@ def simulation(parse_prefix, mp41):
             run = False
             # 打印容器列表
             for container in containers:
-                if 'vtd4' == container.name:
+                if 'vtd1' == container.name:
                     run = True
                     break
             if not run:

+ 0 - 0
src/python2/pjibot_patrol/utils/__init__.py


+ 44 - 0
src/python2/pjibot_patrol/utils/json_utils.py

@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+import sys
+reload(sys)
+sys.setdefaultencoding('utf8')
+import json
+
+def parse_json_to_string_array(file_path):
+    try:
+        with open(file_path, 'r') as file:
+            file_content = file.read()
+            data = json.loads(file_content.decode('utf-8'))  # 假设文件是UTF-8编码,这里需要手动解码
+
+        # 检查数据是否是一个列表,并且列表中的元素是否是字符串
+        if isinstance(data, list):
+            for item in data:
+                if not isinstance(item, basestring):  # Python 2中字符串类型包括str和unicode,用basestring检查
+                    raise ValueError("JSON数组中的元素不是字符串")
+            return data
+        else:
+            return []
+    except Exception as e:
+        return []
+def list_to_json_file(data, file_path):
+    """
+    将列表转换为JSON格式并写入指定的文件路径。
+    如果文件已存在,则覆盖它。
+
+    参数:
+    data (list): 要转换为JSON的列表。
+    file_path (str): 要写入JSON数据的文件路径。
+    """
+    # 将列表转换为JSON格式的字符串,并确保输出为UTF-8编码的字符串
+    json_data = json.dumps(data, ensure_ascii=False, indent=4)
+    json_data_utf8 = json_data.encode('utf-8')  # 编码为UTF-8
+
+    # 以写入模式打开文件,如果文件已存在则覆盖
+    with open(file_path, 'w') as file:
+        # 将UTF-8编码的JSON字符串写入文件
+        file.write(json_data_utf8)
+
+def add_error(parse_prefix,error_bag_json):
+    error_bag_list = parse_json_to_string_array(error_bag_json)
+    error_bag_list.append(parse_prefix)
+    list_to_json_file(error_bag_list, error_bag_json)

+ 65 - 31
src/python2/pjibot_patrol/xosc-pjibot_patrol.py

@@ -1,47 +1,81 @@
 # -*- coding: utf-8 -*-
+# ------- 全局配置 -------
+import sys
+reload(sys)
+sys.setdefaultencoding("utf-8")
 import os
 import time
 import oss2
 import logging
-
+import json
+import subprocess
+import io
+from utils import json_utils
 path1 = '/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/'
 path2 = '/mnt/disk001/dcl_data_process/src/python3/pjibot_outdoor/'
-
-logging.basicConfig(filename=path1 + 'log/xosc-pjibot_patrol.log', level=logging.INFO,
-                    format='%(asctime)s - %(levelname)s - %(message)s')
-
+logging.basicConfig(filename=path1 + 'log/xosc-pjibot_patrol.log', level=logging.INFO,format='%(asctime)s - %(levelname)s - %(message)s')
 key1 = 'pjibot_patrol/'
 sleep_time = 60  # 每多少秒扫描一次
-
+error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjibot_patrol/xosc-errorBag.json"
+# 1 创建阿里云对象
+auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
+endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
+bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
+# ------- 全局配置 -------
 
 def generate_xosc(parse_prefix, local_parse_dir, local_delete_list):
     try:
         os.chdir(path2)
-        command2 = 'python3 jiqiren_outdoor.py ' + local_parse_dir[:-1] + ' 0'  # 配送机器人0 巡检机器人1
-        logging.info("进入目录 %s 调用命令2: %s", path2, str(command2))
-        os.system(command2)
+        command2 = 'python3 jiqiren_outdoor.py {} 0'.format(local_parse_dir[:-1])  # 配送机器人0 巡检机器人1
+        logging.info("进入目录 %s 调用命令2: %s", path2, command2)
+        process = subprocess.Popen(command2, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()  # 等待进程执行完成并获取输出
+        if process.returncode == 0:
+            logging.info("命令输出: %s", stdout.decode("utf-8"))
+        else:
+            logging.error("命令执行失败,错误码: %d", process.returncode)
+            logging.error("命令错误输出: %s", stderr.decode("utf-8"))
         local_xosc_path2 = local_parse_dir + 'simulation/xosc/openx_outdoor0.xosc'
         bucket.put_object_from_file(parse_prefix + 'scenario.xosc', local_xosc_path2)
         bucket.put_object_from_file(parse_prefix + 'scenario_hmi.xosc', local_xosc_path2)
-        logging.info("上传 simulation.xosc(scenario.xosc) 成功: %s" % str(parse_prefix + 'simulation.xosc'))
+        logging.info("上传 scenario.xosc 和 scenario_hmi.xosc 成功: %s" % str(parse_prefix))
+        
+        # ------- 处理 output.json - 开始 -------
+        output_json_path = local_parse_dir + 'simulation/xosc/output.json'
+        callback_json_oss_key = parse_prefix+'callback.json'
+        callback_json_local = local_parse_dir+'/callback.json'
+        try:
+            # 1 解析 output.json
+            if os.path.exists(output_json_path):
+                outputs = json_utils.parse_json_to_string_array(output_json_path)
+                # 2 将 output.json 添加到 callback.json 的 check 字段
+                bucket.get_object_to_file(callback_json_oss_key, callback_json_local)
+                with io.open(callback_json_local, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                if 'check' not in data:
+                    data['check'] = []
+                data['check'].extend(outputs)
+                json_data = json.dumps(data, ensure_ascii=False, indent=4)
+                with io.open(callback_json_local, 'w', encoding='utf-8') as f:
+                    f.write(unicode(json_data))
+                bucket.put_object_from_file(callback_json_oss_key, callback_json_local)
+        except Exception as e3:
+            # todo 可能没有callback.json,已经处理成 callback_done.json了,暂时不管
+            logging.exception("处理 output.json报错: %s" % str(e3))
+            pass
+        # ------- 处理 output.json - 结束 -------
+        
+        
+        # 处理删除
         local_delete_list.append(local_xosc_path2)
+        local_delete_list.append(callback_json_local)
     except Exception as e:
+        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+        error_bag_list.append(parse_prefix)
+        json_utils.list_to_json_file(error_bag_list, error_bag_json)
         logging.exception("生成xosc报错: %s" % str(e))
 
-
-'''
-cname:http://open-bucket.oss.icvdc.com
-内网endpoint: oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com
-oss桶名: open-bucket
-keyid:n8glvFGS25MrLY7j
-secret:xZ2Fozoarpfw0z28FUhtg8cu0yDc5d
-'''
-# ------- 获取合并之后的bag包,解析出csv -------
 if __name__ == '__main__':
-    # 1 创建阿里云对象
-    auth = oss2.Auth('n8glvFGS25MrLY7j', 'xZ2Fozoarpfw0z28FUhtg8cu0yDc5d')
-    endpoint = 'oss-cn-beijing-gqzl-d01-a.ops.gqzl-cloud.com'
-    bucket = oss2.Bucket(auth, endpoint, 'pji-bucket1')
     while True:
         try:
             logging.info("开始新一轮扫描")
@@ -62,27 +96,27 @@ if __name__ == '__main__':
                         csv1_done = False
                         csv2_done = False
                         for obj3 in oss2.ObjectIterator(bucket, prefix=str(parse_prefix_full)):
-                            if '/simulation.xosc' in str(obj3.key) or '/scenario.xosc' in str(obj3.key):
+                            if '/scenario_hmi.xosc' in str(obj3.key): # 仿真使用的 scenario_hmi.xosc 所以必须有
                                 xosc_done = True
                             if '/objects_pji.csv' in str(obj3.key):
                                 csv1_done = True
-                            if '/pos_pji.csv' in str(obj3.key):
+                            if '/ego_pji.csv' in str(obj3.key):
                                 csv2_done = True
                         if xosc_done:
                             continue
                         if not csv1_done:
-                            logging.info("不存在/objects_pji.csv: %s" % str(parse_prefix_full))
                             continue
                         if not csv2_done:
-                            logging.info("不存在/pos_pji.csv: %s" % str(parse_prefix_full))
                             continue
-                        logging.info("需要生成simulation.xosc: %s" % str(parse_prefix_full))
+                        error_bag_list = json_utils.parse_json_to_string_array(error_bag_json)
+                        if str(parse_prefix_full) in error_bag_list:
+                            continue
+                        logging.info("需要生成 scenario_hmi.xosc: %s" % str(parse_prefix_full))
                         local_dir_full = path1 + parse_prefix_full
                         if not os.path.exists(local_dir_full):
                             os.makedirs(local_dir_full)
-                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv',
-                                                  local_dir_full + 'objects_pji.csv')
-                        bucket.get_object_to_file(parse_prefix_full + 'pos_pji.csv', local_dir_full + 'pos_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'objects_pji.csv',local_dir_full + 'objects_pji.csv')
+                        bucket.get_object_to_file(parse_prefix_full + 'ego_pji.csv', local_dir_full + 'ego_pji.csv')
                         generate_xosc(parse_prefix_full, local_dir_full, local_delete_list)
                 except Exception as e:
                     logging.exception("局部异常处理: %s", str(e))

+ 0 - 3
src/python2/pjisuv/2simulation-errorBag.json

@@ -1,3 +0,0 @@
-[
-    "pjibot_delivery/ps001/data_parse/test11071/"
-]

+ 1 - 2
src/python2/pjisuv/xosc-errorBag.json

@@ -1,8 +1,7 @@
 [
     "pjisuv/pjisuv-012/data_parse/2024-08-15-01-44-45_UnknownBigTargetAhead_71/", 
     "pjisuv/pjisuv-013/data_parse/2024-08-06-03-06-32_EnterTjunction_27/", 
-    "pjisuv/pjisuv-013/data_parse/2024-08-06-03-08-26_FindTrafficLight_EnterTjunction_93/", 
-    "pjisuv/pjisuv-004/data_parse/mlx11291/", 
+    "pjisuv/pjisuv-013/data_parse/2024-08-06-03-08-26_FindTrafficLight_EnterTjunction_93/",
     "pjisuv/pjisuv-013/data_parse/2024-08-06-03-03-22_FindTrafficLight_86/", 
     "pjisuv/pjisuv-013/data_parse/2024-08-06-03-07-56_FindTrafficLight_65/", 
     "pjisuv/pjisuv-013/data_parse/2024-08-06-03-06-49_EnterTjunction_ControlJump_57/", 

+ 1 - 1
src/python2/pjisuv/xosc-pjisuv.py

@@ -12,7 +12,7 @@ logging.basicConfig(filename=path1 + 'log/xosc-pjisuv.log', level=logging.INFO,
                     format='%(asctime)s - %(levelname)s - %(message)s')
 
 key1 = 'pjisuv/'
-sleep_time = 10  # 每多少秒扫描一次
+sleep_time = 20  # 每多少秒扫描一次
 
 error_bag_json = "/mnt/disk001/dcl_data_process/src/python2/pjisuv/xosc-errorBag.json"
 

BIN=BIN
src/python3/pjibot_outdoor/a.out


+ 2 - 2
src/python3/pjibot_outdoor/jiqiren_outdoor.py

@@ -20,8 +20,8 @@ import warnings
 warnings.filterwarnings("ignore")
 
 
-xodr_list = ['/home/hancheng/maps/taiheqiao_map/thq_1116.xodr', '/home/hancheng/maps/anqing/anqing.xodr']
-map_engine = '/media/hancheng/Simulation5/pujin/a.out'
+xodr_list = ['/mnt/disk001/simulation_outdoor/thq_1116.xodr', '/mnt/disk001/simulation_outdoor/anqing.xodr']
+map_engine = '/mnt/disk001/dcl_data_process/src/python3/pjibot_outdoor/a.out'
 
 
 class Batchrun: