@@ -7,11 +7,7 @@ import api.common.util.*;
 import com.css.simulation.resource.scheduler.mapper.*;
 import com.css.simulation.resource.scheduler.pojo.po.*;
 import com.css.simulation.resource.scheduler.pojo.to.*;
-import com.css.simulation.resource.scheduler.util.MinioUtil;
 import io.kubernetes.client.openapi.ApiClient;
-import io.kubernetes.client.openapi.apis.BatchV1Api;
-import io.kubernetes.client.openapi.models.V1Job;
-import io.kubernetes.client.util.Yaml;
 import io.minio.MinioClient;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
@@ -23,7 +19,6 @@ import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.stereotype.Component;
 
-import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -208,7 +203,8 @@ public class ManualProjectConsumer {
             //4-4 convert the object to JSON
             String taskJson = JsonUtil.beanToJson(taskTO);
             //4-5 use the projectId as the topic name and send the task message to Kafka
-            kafkaTemplate.send(projectId, taskJson).addCallback(success -> {
+//            kafkaTemplate.send(projectId, taskJson).addCallback(success -> {
+            kafkaTemplate.send("test", taskJson).addCallback(success -> {
                 // topic the message was sent to
                 String topic = success.getRecordMetadata().topic();
                 // partition the message was sent to
@@ -229,60 +225,47 @@ public class ManualProjectConsumer {
         // import the algorithm image into the private registry
         String algorithmId = projectMessageDTO.getAlgorithmId();   // algorithm id
         //4-1 look up the algorithm file path by algorithm id and whether it has already been imported as an image.
-        AlgorithmPO algorithmPO = algorithmMapper.selectById(algorithmId);
-        if (algorithmPO == null) {
-            // call the Suowei remote interface
-        }
-        String minioPath = algorithmPO.getMinioPath();
-        String dockerImage;
-        if ("0".equals(algorithmPO.getDockerImport())) {
-            dockerImage = "algorithm_" + algorithmId + ":latest";
-            String algorithmTarLinuxTempPath = linuxTempPath + minioPath;
-            // download the algorithm file locally (2: to the registry server)
-            MinioUtil.downloadToFile(minioClient, bucketName, minioPath, algorithmTarLinuxTempPath);
-            //4-2 run docker load on the algorithm file locally to build the image (the cluster version can switch to docker-java against the registry)
-            LinuxUtil.execute("docker import " + algorithmTarLinuxTempPath + " " + dockerImage);
-        } else if ("1".equals(algorithmPO.getDockerImport()) && StringUtil.isNotEmpty(algorithmPO.getDockerImage())) {
-            dockerImage = algorithmPO.getDockerImage();
-        } else {
-            throw new RuntimeException("The MySQL data of algorithm " + algorithmId + " is invalid!");
-        }
+//        AlgorithmPO algorithmPO = algorithmMapper.selectById(algorithmId);
+//        if (algorithmPO == null) {
+//            // call the Suowei remote interface
+//        }
+//        String minioPath = algorithmPO.getMinioPath();
+//        String dockerImage;
+//        if ("0".equals(algorithmPO.getDockerImport())) {
+//            dockerImage = "algorithm_" + algorithmId + ":latest";
+//            String algorithmTarLinuxTempPath = linuxTempPath + minioPath;
+//            // download the algorithm file locally (2: to the registry server)
+//            MinioUtil.downloadToFile(minioClient, bucketName, minioPath, algorithmTarLinuxTempPath);
+//            //4-2 run docker load on the algorithm file locally to build the image (the cluster version can switch to docker-java against the registry)
+//            LinuxUtil.execute("docker import " + algorithmTarLinuxTempPath + " " + dockerImage);
+//        } else if ("1".equals(algorithmPO.getDockerImport()) && StringUtil.isNotEmpty(algorithmPO.getDockerImage())) {
+//            dockerImage = algorithmPO.getDockerImage();
+//        } else {
+//            throw new RuntimeException("The MySQL data of algorithm " + algorithmId + " is invalid!");
+//        }
         // -------------------------------- 5 create the pod and start execution --------------------------------
         int completions = sceneList.size();     // completion count (end marker)
         int parallelism = projectMessageDTO.getParallelism();   // degree of parallelism
         log.info("------- ManualProjectConsumer completions of project " + projectId + ": " + completions);
         log.info("------- ManualProjectConsumer parallelism of project " + projectId + ": " + parallelism);
-        BatchV1Api batchV1Api = new BatchV1Api(apiClient);
-        V1Job yaml = (V1Job) Yaml.load(new File("/opt/simulation-cloud/simulation-resource-scheduler/conf/job-template.yaml"));
-//        V1Job yaml = (V1Job) Yaml.load(ResourceUtils.getFile("classpath:kubernetes/template/job-test.yaml"));
-//        //1 apiVersion
-//        //2 kind
-//        //3 metadata
-//        V1ObjectMeta metadata = yaml.getMetadata();
-//        metadata.setName("project_" + projectId);
-//        yaml.setMetadata(metadata);
-//        //4 job
-//        V1JobSpec job = yaml.getSpec();
-//        job.setCompletions(completions); // what is the criterion for this?
-//        job.setParallelism(parallelism);
-//        //5 pod
-//        V1PodSpec v1PodSpec = job.getTemplate().getSpec();
-//        //6 container
-//        List<V1Container> containers = v1PodSpec.getContainers();
-//        for (V1Container container : containers) {
-//            String name = container.getName();
-//            if ("vtd".equals(name)) {
-//                container.setName("vtd_" + projectId);
-//            }
-//            if ("algorithm".equals(name)) {
-//                container.setName("algorithm_" + projectId);
-////                container.setImage(dockerImage);
-//            }
-//        }
-//        //4-4 create
-//        yaml.setSpec(job);
-        log.info("------- ManualProjectConsumer creating job: " + yaml);
-        batchV1Api.createNamespacedJob("simulation-cloud", yaml, null, null, null);
+        String jobTemplateYamlPathSource = "/opt/simulation-cloud/simulation-resource-scheduler/job-template/job-template.yaml";
+//        String jobTemplateYamlPathSource = "D:\\temp\\job-template.yaml";
+        String jobTemplateYamlPathTarget = "/opt/simulation-cloud/simulation-resource-scheduler/job-yaml/" + "project-" + projectId + ".yaml";
+        String yamlSource = FileUtil.read(jobTemplateYamlPathSource);
+        log.info("------- ManualProjectConsumer template file: " + yamlSource);
+        String replace0 = yamlSource.replace("job-cloud-simulation", "project-" + projectId);
+        String replace1 = replace0.replace("vtd-container", "vtd-" + projectId);
+        String replace2 = replace1.replace("algorithm-container", "algorithm-" + projectId);
+        int i = replace2.indexOf("completions:");
+        int j = replace2.indexOf("parallelism:");
+        StringBuilder stringBuilder = new StringBuilder(replace2);
+        stringBuilder.replace(i + "completions: ".length(), i + "completions: ".length() + 1, completions + "");
+        stringBuilder.replace(j + "parallelism: ".length(), j + "parallelism: ".length() + 1, parallelism + "");
+        String yamlTarget0 = stringBuilder.toString();
+        String yamlTarget1 = yamlTarget0.replace("apiVers1on", "apiVersion");
+        log.info("------- ManualProjectConsumer applying yaml file: " + yamlTarget1);
+        FileUtil.writeStringToLocalFile(yamlTarget1, jobTemplateYamlPathTarget);
+        LinuxUtil.execute("kubectl apply -f " + jobTemplateYamlPathTarget);
     }
 
 }
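
Note on the new template rendering: the two StringBuilder.replace calls overwrite exactly one character after "completions: " and "parallelism: ", so they only hold while both values stay single digits. Below is a minimal sketch of a more tolerant substitution, assuming the template keeps each key as a "key: <number>" line; the helper name setJobCounts is hypothetical and not part of this change.

    // Sketch only: rewrites whatever number currently follows each key, not just a single character.
    private static String setJobCounts(String yamlSource, int completions, int parallelism) {
        return yamlSource
                .replaceAll("(?m)^([ \\t]*completions:[ \\t]*)\\d+", "$1" + completions)
                .replaceAll("(?m)^([ \\t]*parallelism:[ \\t]*)\\d+", "$1" + parallelism);
    }

Calling setJobCounts(replace2, completions, parallelism) would stand in for the two StringBuilder.replace calls; writing the rendered YAML and running kubectl apply stay unchanged.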