@@ -8,6 +8,7 @@ import api.common.util.JsonUtil;
 import api.common.util.StringUtil;
 import com.css.simulation.resource.scheduler.mapper.*;
 import com.css.simulation.resource.scheduler.pojo.po.*;
+import com.css.simulation.resource.scheduler.pojo.to.KubernetesNodeTO;
 import com.css.simulation.resource.scheduler.pojo.to.PrefixTO;
 import com.css.simulation.resource.scheduler.service.ProjectService;
 import com.css.simulation.resource.scheduler.util.ProjectUtil;
@@ -16,13 +17,12 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.data.redis.core.StringRedisTemplate;
 import org.springframework.kafka.annotation.KafkaListener;
-import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.stereotype.Component;
 
+import javax.annotation.Resource;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -47,39 +47,25 @@ public class ProjectConsumer {
 
     // -------------------------------- Comment --------------------------------
 
-    @Autowired
-    KafkaTemplate<String, String> kafkaTemplate;
-    @Autowired
+    @Resource
     StringRedisTemplate redisTemplate;
-    @Autowired
+    @Resource
     ManualProjectMapper manualProjectMapper;
-    @Autowired
-    AutoProjectMapper autoProjectMapper;
-    @Autowired
+    @Resource
     AutoSubProjectMapper autoSubProjectMapper;
-    @Autowired
-    TaskMapper taskMapper;
-    @Autowired
-    IndexMapper indexMapper;
-    @Autowired
-    IndexTemplateMapper indexTemplateMapper;
-    @Autowired
-    SceneMapper sceneMapper;
-    @Autowired
+    @Resource
     VehicleMapper vehicleMapper;
-    @Autowired
+    @Resource
     SensorCameraMapper sensorCameraMapper;
-    @Autowired
+    @Resource
     SensorOgtMapper sensorOgtMapper;
-    @Autowired
-    AlgorithmMapper algorithmMapper;
-    @Autowired
+    @Resource
     UserMapper userMapper;
-    @Autowired
+    @Resource
     ClusterMapper clusterMapper;
-    @Autowired
+    @Resource
     ProjectService projectService;
-    @Autowired
+    @Resource
     ProjectUtil projectUtil;
 
 
@@ -96,7 +82,7 @@ public class ProjectConsumer {
         //1 读取 kafka 的 project 信息
         ProjectMessageDTO projectMessageDTO = JsonUtil.jsonToBean(projectJson, ProjectMessageDTO.class);
         String projectId = projectMessageDTO.getProjectId();   // 手动执行项目 id 或 自动执行子项目 id
-        Long parallelism = projectMessageDTO.getParallelism(); // 项目并行度
+        long parallelism = projectMessageDTO.getParallelism(); // 项目并行度
         String projectType = projectMessageDTO.getType();      // 项目类型
         //2 根据 projectId 获取创建用户 id
         String userId;
@@ -119,7 +105,7 @@ public class ProjectConsumer {
         ClusterPO clusterPO;
         if (DictConstants.ROLE_CODE_SYSADMIN.equals(roleCode) || DictConstants.ROLE_CODE_ADMIN.equals(roleCode)) {     //3-1 管理员账户和管理员子账户直接执行
             PrefixTO redisPrefix = projectUtil.getRedisPrefixByClusterIdAndProjectId(DictConstants.SYSTEM_CLUSTER_ID, projectId);
-            run(DictConstants.SYSTEM_CLUSTER_ID, projectId, redisPrefix.getProjectRunningKey(), projectJson);
+            run(DictConstants.SYSTEM_CLUSTER_ID, projectId, projectType, redisPrefix.getProjectRunningKey(), projectJson, parallelism);
             return;
         } else if (DictConstants.ROLE_CODE_UESR.equals(roleCode)) {    //3-2 普通账户,不管是独占还是共享,都在自己的集群里排队,根据自己的独占节点排队
             clusterPO = clusterMapper.selectByUserId(userId);
@@ -142,12 +128,12 @@ public class ProjectConsumer {
         Set<String> clusterRunningKeySet = redisTemplate.keys(redisPrefix.getClusterRunningPrefix() + "*");
         List<String> runningProjectSet;
         if (CollectionUtil.isEmpty(clusterRunningKeySet)) {
-            run(clusterId, projectId, redisPrefix.getProjectRunningKey(), projectJson);
+            run(clusterId, projectId, projectType, redisPrefix.getProjectRunningKey(), projectJson, parallelism);
             return;
         }
         runningProjectSet = projectUtil.getRunningProjectList(clusterRunningKeySet);
         if (CollectionUtil.isEmpty(runningProjectSet)) {
-            run(clusterId, projectId, redisPrefix.getProjectRunningKey(), projectJson);
+            run(clusterId, projectId, projectType, redisPrefix.getProjectRunningKey(), projectJson, parallelism);
             return;
         }
         // 计算正在运行的项目的并行度总和
@@ -159,46 +145,54 @@ public class ProjectConsumer {
         }
         // 如果执行后的并行度总和小于最大节点数则执行,否则不执行
         if (parallelismSum + parallelism <= simulationLicenseNumber) {
-            run(clusterId, projectId, redisPrefix.getProjectRunningKey(), projectJson);
+            run(clusterId, projectId, projectType, redisPrefix.getProjectRunningKey(), projectJson, parallelism);
         } else {
             wait(clusterId, projectId, redisPrefix.getProjectWaitingKey(), projectJson);
         }
     }
 
-    public void run(String clusterId, String projectId, String projectRunningKey, String projectJson) {
-        log.info("ProjectConsumer--run 集群 " + clusterId + " 将项目 " + projectId + " 执行!");
-        redisTemplate.opsForValue().set(projectRunningKey, projectJson);
-        parseProject(projectJson, "cluster:" + clusterId, projectRunningKey);
+    public void run(String clusterId, String projectId, String projectType, String projectRunningKey, String projectJson, long parallelism) {
+
+        //1 获取一个剩余可用并行度最大的节点
+        KubernetesNodeTO maxParallelismPNodeTO = projectUtil.getMaxParallelismPNode();
+        String maxRestParallelismNode = maxParallelismPNodeTO.getName();
+        Long maxRestParallelism = maxParallelismPNodeTO.getMaxParallelism();
+
+        //2 判断剩余可用并行度是否大于项目并行度,否则加入扩充队列
+        if (maxRestParallelism > parallelism) {
+            log.info("ProjectConsumer--run 集群 " + clusterId + " 将项目 " + projectId + "在节点" + maxRestParallelismNode + " 执行!");
+            parseProject(projectId, projectType, projectJson, "cluster:" + clusterId, projectRunningKey, maxRestParallelismNode, parallelism);
+        } else if (maxRestParallelism > 0) {
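+            // the node cannot fit the full requested parallelism but still has spare capacity,
+            // so run the project with the node's remaining parallelism instead of the requested value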
+            log.info("ProjectConsumer--run 集群 " + clusterId + " 将项目 " + projectId + "在节点" + maxRestParallelismNode + " 执行!");
+            parseProject(projectId, projectType, projectJson, "cluster:" + clusterId, projectRunningKey, maxRestParallelismNode, maxRestParallelism);
+        }
     }
 
     public void wait(String clusterId, String projectId, String projectWaitingKey, String projectJson) {
-        log.info("ProjectConsumer--cacheManualProject 集群 " + clusterId + " 将项目 " + projectId + " 放入等待队列!");
+        log.info("ProjectConsumer--wait 集群 " + clusterId + " 将项目 " + projectId + " 放入等待队列!");
         redisTemplate.opsForValue().set(projectWaitingKey, projectJson);
     }
 
 
     /**
-     * 开始执行以及重新执行
-     *
-     * @param projectJson 项目启动消息
+     * @param projectId            manual project id or auto sub-project id
+     * @param projectType          project type
+     * @param projectJson          project start message read from Kafka
+     * @param clusterPrefix        redis key prefix of the cluster ("cluster:" + clusterId)
+     * @param projectRunningPrefix projectRunningKey
+     * @param nodeName             name of the Kubernetes node the project will run on
+     * @param parallelism          project parallelism
      */
     @SneakyThrows
-    public void parseProject(String projectJson, String clusterPrefix, String projectRunningPrefix) {
-
+    public void parseProject(String projectId, String projectType, String projectJson, String clusterPrefix, String projectRunningPrefix, String nodeName, long parallelism) {
         // -------------------------------- 0 准备 --------------------------------
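+        // pre-run preparation; it now also receives the chosen node name and the parallelism to run with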
+        projectService.prepare(clusterPrefix, projectId, projectType, projectRunningPrefix, projectJson, nodeName, parallelism);
         log.info("ProjectConsumer--parseManualProject 接收到项目开始消息为:" + projectJson);
-        //1 读取 kafka 的 project 信息
         ProjectMessageDTO projectMessageDTO = JsonUtil.jsonToBean(projectJson, ProjectMessageDTO.class);
-        String projectId = projectMessageDTO.getProjectId();   // 项目 id
-        String projectType = projectMessageDTO.getType();     // 项目 类型
         String packageId = projectMessageDTO.getScenePackageId();      // 场景测试包 id
         Long maxSimulationTime = projectMessageDTO.getMaxSimulationTime(); // 最大仿真时间,即生成视频的时间长度
         String vehicleConfigId = projectMessageDTO.getVehicleConfigId();// 模型配置 id
         String algorithmId = projectMessageDTO.getAlgorithmId();   // 算法 id
         String userId = manualProjectMapper.selectCreateUserById(projectId);
-        Long parallelism = projectMessageDTO.getParallelism(); // 并行度
-        //2 执行前准备,删除改项目下所有任务,即重新执行改项目时需要新的测试包
-        projectService.prepare(clusterPrefix, projectId, projectJson);
         // -------------------------------- 1 查询场景 --------------------------------
         //1-1 根据场景测试包 packageId,拿到场景集合(不包括重复场景),重复场景会在发送消息时根据叶子指标发送多次。
         List<ScenePO> scenePOList = projectService.handlePackage(projectRunningPrefix, projectId, packageId);
@@ -214,6 +208,7 @@ public class ProjectConsumer {
         String algorithmDockerImage = projectService.handleAlgorithm(projectId, algorithmId);
         // -------------------------------- 5 创建 pod 开始执行 --------------------------------
         projectService.transferAndRunYaml(
+                nodeName,
                 jobTemplate + "job-template.yaml",
                 projectId,
                 algorithmDockerImage,