[Refactor][Web] refactor_model_to_typehandler #2390

Merged · 12 commits · Oct 17, 2023
8 changes: 4 additions & 4 deletions dinky-admin/src/main/java/org/dinky/assertion/Assert.java
@@ -20,7 +20,7 @@
 package org.dinky.assertion;
 
 import org.dinky.data.exception.BusException;
-import org.dinky.data.model.Cluster;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.model.Jar;
 import org.dinky.data.model.Task;

@@ -31,9 +31,9 @@
  */
 public interface Assert {
 
-    static void check(Cluster cluster) {
-        if (cluster.getId() == null) {
-            throw new BusException("Flink 集群【" + cluster.getId() + "】不存在");
+    static void check(ClusterInstance clusterInstance) {
+        if (clusterInstance.getId() == null) {
+            throw new BusException("Flink 集群【" + clusterInstance.getId() + "】不存在");
         }
     }
dinky-admin/src/main/java/org/dinky/controller/ClusterInstanceController.java

@@ -23,8 +23,7 @@
 import org.dinky.data.constant.PermissionConstants;
 import org.dinky.data.enums.BusinessType;
 import org.dinky.data.enums.Status;
-import org.dinky.data.model.Cluster;
-import org.dinky.data.result.ProTableResult;
+import org.dinky.data.model.ClusterInstance;
 import org.dinky.data.result.Result;
 import org.dinky.service.ClusterInstanceService;

@@ -40,6 +39,7 @@
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
+import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
 import com.fasterxml.jackson.databind.JsonNode;
 
 import cn.dev33.satoken.annotation.SaCheckPermission;
@@ -53,7 +53,7 @@
 /** ClusterInstanceController */
 @Slf4j
 @RestController
-@Api(tags = "Cluster Instance Controller")
+@Api(tags = "ClusterInstance Instance Controller")
 @RequestMapping("/api/cluster")
 @RequiredArgsConstructor
 public class ClusterInstanceController {
@@ -63,29 +63,29 @@ public class ClusterInstanceController {
     /**
      * added or updated cluster instance
      *
-     * @param cluster {@link Cluster} cluster instance
+     * @param clusterInstance {@link ClusterInstance} cluster instance
      * @return {@link Result}<{@link Void}>
      * @throws Exception exception
      */
     @PutMapping
     @Log(title = "Insert Or Update Cluster Instance", businessType = BusinessType.INSERT_OR_UPDATE)
     @ApiOperation("Insert Or Update Cluster Instance")
     @ApiImplicitParam(
-            name = "cluster",
-            value = "Cluster Instance",
-            dataType = "Cluster",
+            name = "clusterInstance",
+            value = "ClusterInstance Instance",
+            dataType = "ClusterInstance",
             paramType = "body",
             required = true,
-            dataTypeClass = Cluster.class)
+            dataTypeClass = ClusterInstance.class)
     @SaCheckPermission(
             value = {
                 PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_EDIT,
                 PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_ADD
             },
             mode = SaMode.OR)
-    public Result<Void> saveOrUpdateClusterInstance(@RequestBody Cluster cluster) throws Exception {
-        cluster.setAutoRegisters(false);
-        clusterInstanceService.registersCluster(cluster);
+    public Result<Void> saveOrUpdateClusterInstance(@RequestBody ClusterInstance clusterInstance) throws Exception {
+        clusterInstance.setAutoRegisters(false);
+        clusterInstanceService.registersCluster(clusterInstance);
         return Result.succeed(Status.SAVE_SUCCESS);
     }

@@ -100,7 +100,7 @@
     @ApiOperation("Update Cluster Instance Status")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Instance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
@@ -144,47 +144,48 @@ public Result<Void> deleteClusterInstanceById(@RequestParam Integer id) {
         }
     }
 
-    /**
-     * list cluster instances
-     *
-     * @param para {@link JsonNode} query parameters
-     * @return {@link ProTableResult}<{@link Cluster}>
-     */
-    @PostMapping
+    @GetMapping("/list")
     @ApiOperation("Cluster Instance List")
     @ApiImplicitParam(
-            name = "para",
-            value = "Query Parameters",
-            dataType = "JsonNode",
-            paramType = "body",
+            name = "keyword",
+            value = "Query keyword",
+            dataType = "String",
+            paramType = "query",
             required = true,
             dataTypeClass = JsonNode.class)
-    public ProTableResult<Cluster> listClusters(@RequestBody JsonNode para) {
-        return clusterInstanceService.selectForProTable(para);
+    public Result<List<ClusterInstance>> listClusterInstance(@RequestParam("keyword") String searchKeyWord) {
+        return Result.succeed(clusterInstanceService.list(new LambdaQueryWrapper<ClusterInstance>()
+                .like(ClusterInstance::getName, searchKeyWord)
+                .or()
+                .like(ClusterInstance::getAlias, searchKeyWord)
+                .or()
+                .like(ClusterInstance::getNote, searchKeyWord)));
     }
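
Review note on the new list endpoint above: MyBatis-Plus renders the chained like(...).or().like(...) calls as a flat WHERE name LIKE ? OR alias LIKE ? OR note LIKE ? clause. A minimal self-contained sketch of the same pattern (the wrapper class and method name here are illustrative, not part of this diff):

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;

import org.dinky.data.model.ClusterInstance;

public class ClusterInstanceSearchSketch {

    // Builds the same fuzzy-match condition used by GET /api/cluster/list,
    // which MyBatis-Plus renders as:
    //   WHERE name LIKE '%kw%' OR alias LIKE '%kw%' OR note LIKE '%kw%'
    public static LambdaQueryWrapper<ClusterInstance> keywordQuery(String keyword) {
        return new LambdaQueryWrapper<ClusterInstance>()
                .like(ClusterInstance::getName, keyword)
                .or()
                .like(ClusterInstance::getAlias, keyword)
                .or()
                .like(ClusterInstance::getNote, keyword);
    }
}

One caveat worth flagging: a blank keyword degenerates to LIKE '%%', which matches every row; MyBatis-Plus's conditional overload, like(StrUtil.isNotBlank(keyword), ClusterInstance::getName, keyword), would skip the clause entirely in that case.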

     /**
      * get all enable cluster instances
      *
-     * @return {@link Result}<{@link List}<{@link Cluster}>>
+     * @return {@link Result}<{@link List}<{@link ClusterInstance}>>
      */
     @GetMapping("/listEnabledAll")
     @ApiOperation("Get all enable cluster instances")
-    public Result<List<Cluster>> listEnabledAllClusterInstance() {
-        List<Cluster> clusters = clusterInstanceService.listEnabledAllClusterInstance();
-        return Result.succeed(clusters);
+    public Result<List<ClusterInstance>> listEnabledAllClusterInstance() {
+        List<ClusterInstance> clusterInstances = clusterInstanceService.listEnabledAllClusterInstance();
+        return Result.succeed(clusterInstances);
     }

     /**
      * get session enable cluster instances , this method is {@link Deprecated}
      *
-     * @return {@link Result}<{@link List}<{@link Cluster}>>
+     * @return {@link Result}<{@link List}<{@link ClusterInstance}>>
      */
     @GetMapping("/listSessionEnable")
-    @ApiOperation(value = "Get Enable Session Cluster", notes = "Get All Enable Cluster Instances Of Session Type")
-    public Result<List<Cluster>> listSessionEnable() {
-        List<Cluster> clusters = clusterInstanceService.listSessionEnable();
-        return Result.succeed(clusters);
+    @ApiOperation(
+            value = "Get Enable Session ClusterInstance",
+            notes = "Get All Enable Cluster Instances Of Session Type")
+    public Result<List<ClusterInstance>> listSessionEnable() {
+        List<ClusterInstance> clusterInstances = clusterInstanceService.listSessionEnable();
+        return Result.succeed(clusterInstances);
     }

     /**
@@ -197,9 +198,9 @@ public Result<List<Cluster>> listSessionEnable() {
     @ApiOperation("Cluster Instance Heartbeat")
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_HEARTBEATS})
     public Result<Void> heartbeat() {
-        List<Cluster> clusters = clusterInstanceService.list();
-        for (Cluster cluster : clusters) {
-            clusterInstanceService.registersCluster(cluster);
+        List<ClusterInstance> clusterInstances = clusterInstanceService.list();
+        for (ClusterInstance clusterInstance : clusterInstances) {
+            clusterInstanceService.registersCluster(clusterInstance);
         }
         return Result.succeed(Status.CLUSTER_INSTANCE_HEARTBEAT_SUCCESS);
     }
@@ -229,7 +230,7 @@ public Result<Integer> recycleCluster() {
     @ApiOperation("Cluster Instance Kill")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Instance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
@@ -238,22 +239,22 @@ public Result<Integer> recycleCluster() {
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_INSTANCE_KILL})
     public Result<Void> killClusterInstance(@RequestParam("id") Integer id) {
         clusterInstanceService.killCluster(id);
-        return Result.succeed("Kill Cluster Succeed.");
+        return Result.succeed("Kill ClusterInstance Succeed.");
     }
 
     @PutMapping("/deploySessionClusterInstance")
     @Log(title = "Deploy Session Cluster Instance", businessType = BusinessType.INSERT_OR_UPDATE)
     @ApiOperation("Deploy Session Cluster Instance")
     @ApiImplicitParam(
             name = "id",
-            value = "Cluster Instance Id",
+            value = "ClusterInstance Instance Id",
             dataType = "Integer",
             paramType = "query",
             required = true,
             dataTypeClass = Integer.class,
             example = "1")
     @SaCheckPermission(value = {PermissionConstants.REGISTRATION_CLUSTER_CONFIG_DEPLOY})
-    public Result<Cluster> deploySessionClusterInstance(@RequestParam("id") Integer id) {
+    public Result<ClusterInstance> deploySessionClusterInstance(@RequestParam("id") Integer id) {
         return Result.succeed(clusterInstanceService.deploySessionCluster(id), Status.CLUSTER_INSTANCE_DEPLOY);
     }
 }
dinky-admin/src/main/java/org/dinky/data/dto/ClusterConfigurationDTO.java

@@ -22,7 +22,6 @@
 import org.dinky.data.model.ClusterConfiguration;
 import org.dinky.gateway.model.FlinkClusterConfig;
 import org.dinky.mybatis.annotation.Save;
-import org.dinky.utils.JsonUtils;
 
 import javax.validation.constraints.NotNull;

@@ -89,7 +88,7 @@ public static ClusterConfigurationDTO fromBean(ClusterConfiguration bean) {
     public ClusterConfiguration toBean() {
         ClusterConfiguration clusterConfiguration = new ClusterConfiguration();
         BeanUtil.copyProperties(this, clusterConfiguration);
-        clusterConfiguration.setConfigJson(JsonUtils.toJsonString(this.getConfig()));
+        clusterConfiguration.setConfigJson(this.getConfig());
         return clusterConfiguration;
     }
 }
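
This hunk is the heart of the model-to-typehandler refactor: toBean() now hands the typed FlinkClusterConfig straight to the entity instead of flattening it to a JSON string by hand, which implies the entity field is persisted through a MyBatis-Plus type handler. A hedged sketch of that pattern (the table name, annotation placement, and choice of JacksonTypeHandler are assumptions for illustration; the entity itself is not shown in this diff):

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.extension.handlers.JacksonTypeHandler;

import org.dinky.gateway.model.FlinkClusterConfig;

// Sketch: a typed field backed by a JSON column. autoResultMap = true is
// required for MyBatis-Plus to apply the type handler when reading rows back.
@TableName(value = "dinky_cluster_configuration", autoResultMap = true)
public class ClusterConfigurationSketch {

    @TableField(typeHandler = JacksonTypeHandler.class)
    private FlinkClusterConfig configJson;
}

The payoff is what the diff shows: callers pass domain objects around, and JSON serialization happens exactly once, at the persistence boundary.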
52 changes: 24 additions & 28 deletions dinky-admin/src/main/java/org/dinky/data/dto/JobDataDto.java
@@ -19,15 +19,18 @@
 
 package org.dinky.data.dto;
 
+import org.dinky.data.flink.checkpoint.CheckPointOverView;
+import org.dinky.data.flink.config.CheckpointConfigInfo;
 import org.dinky.data.flink.config.FlinkJobConfigInfo;
 import org.dinky.data.flink.exceptions.FlinkJobExceptionsDetail;
 import org.dinky.data.flink.job.FlinkJobDetailInfo;
 import org.dinky.data.model.JobHistory;
+import org.dinky.data.model.mapping.ClusterConfigurationMapping;
+import org.dinky.data.model.mapping.ClusterInstanceMapping;
 import org.dinky.utils.JsonUtils;
 
 import java.time.LocalDateTime;
 
-import com.baomidou.mybatisplus.annotation.TableField;
 import com.fasterxml.jackson.databind.JsonNode;
 
 import cn.hutool.json.JSONUtil;
@@ -63,38 +66,31 @@ public class JobDataDto {
     @ApiModelProperty(value = "Exceptions Detail Object", notes = "Object representing job exceptions details")
     private FlinkJobExceptionsDetail exceptions;
 
-    @TableField(exist = false)
     @ApiModelProperty(value = "Checkpoints Object", notes = "Object representing job checkpoints")
-    private JsonNode checkpoints;
+    private CheckPointOverView checkpoints;
 
-    @TableField(exist = false)
     @ApiModelProperty(value = "Checkpoints Config Object", notes = "Object representing checkpoints configuration")
-    private JsonNode checkpointsConfig;
+    private CheckpointConfigInfo checkpointsConfig;
 
-    @ApiModelProperty(value = "FlinkJobConfigInfo", notes = "FlinkJobConfigInfo representing job configuration")
+    @ApiModelProperty(value = "JobConfigInfo", notes = "JobConfigInfo representing job configuration")
     private FlinkJobConfigInfo config;
 
-    @TableField(exist = false)
     @ApiModelProperty(value = "Jar Object", notes = "Object representing the JAR used in the job")
     private JsonNode jar;
 
-    @TableField(exist = false)
-    @ApiModelProperty(value = "Cluster Object", notes = "Object representing the cluster")
-    private JsonNode cluster;
+    @ApiModelProperty(value = "ClusterInstance Object", notes = "Object representing the cluster")
+    private ClusterInstanceMapping cluster;
 
-    @TableField(exist = false)
     @ApiModelProperty(value = "Cluster Configuration Object", notes = "Object representing cluster configuration")
-    private JsonNode clusterConfiguration;
+    private ClusterConfigurationMapping clusterConfiguration;
 
-    @TableField(exist = false)
     @ApiModelProperty(
             value = "Error Flag",
             dataType = "boolean",
             example = "true",
             notes = "Flag indicating if there was an error")
     private boolean error;
 
-    @TableField(exist = false)
     @ApiModelProperty(
             value = "Error Message",
             dataType = "boolean",
@@ -106,14 +102,14 @@ public JobHistory toJobHistory() {
         return JobHistory.builder()
                 .id(this.id)
                 .tenantId(this.tenantId)
-                .jobJson(JSONUtil.toJsonStr(getJob()))
-                .exceptionsJson(JSONUtil.toJsonStr(getExceptions()))
-                .checkpointsJson(JSONUtil.toJsonStr(getCheckpoints()))
-                .checkpointsConfigJson(JSONUtil.toJsonStr(getCheckpointsConfig()))
-                .configJson(JSONUtil.toJsonStr(getConfig()))
+                .jobJson(this.job)
+                .exceptionsJson(this.exceptions)
+                .checkpointsJson(this.checkpoints)
+                .checkpointsConfigJson(this.checkpointsConfig)
+                .configJson(this.config)
                 .jarJson(JSONUtil.toJsonStr(getJar()))
-                .clusterJson(JSONUtil.toJsonStr(getCluster()))
-                .clusterConfigurationJson(JSONUtil.toJsonStr(getClusterConfiguration()))
+                .clusterJson(this.cluster)
+                .clusterConfigurationJson(this.clusterConfiguration)
                 .updateTime(LocalDateTime.now())
                 .build();
     }
@@ -122,14 +118,14 @@ public static JobDataDto fromJobHistory(JobHistory jobHistory) {
         return JobDataDto.builder()
                 .id(jobHistory.getId())
                 .tenantId(jobHistory.getTenantId())
-                .job(JsonUtils.toJavaBean(jobHistory.getJobJson(), FlinkJobDetailInfo.class))
-                .exceptions(JsonUtils.toJavaBean(jobHistory.getExceptionsJson(), FlinkJobExceptionsDetail.class))
-                .checkpoints(JsonUtils.parseToJsonNode(jobHistory.getCheckpointsJson()))
-                .checkpointsConfig(JsonUtils.parseToJsonNode(jobHistory.getCheckpointsConfigJson()))
-                .config(JsonUtils.toJavaBean(jobHistory.getConfigJson(), FlinkJobConfigInfo.class))
+                .job(jobHistory.getJobJson())
+                .exceptions(jobHistory.getExceptionsJson())
+                .checkpoints(jobHistory.getCheckpointsJson())
+                .checkpointsConfig(jobHistory.getCheckpointsConfigJson())
+                .config(jobHistory.getConfigJson())
                 .jar(JsonUtils.parseToJsonNode(jobHistory.getJarJson()))
-                .cluster(JsonUtils.parseToJsonNode(jobHistory.getClusterJson()))
-                .clusterConfiguration(JsonUtils.parseToJsonNode(jobHistory.getClusterConfigurationJson()))
+                .cluster(jobHistory.getClusterJson())
+                .clusterConfiguration(jobHistory.getClusterConfigurationJson())
                 .build();
     }
 }
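
The JobDataDto changes follow from the same refactor: toJobHistory() and fromJobHistory() now pass typed objects (CheckPointOverView, ClusterInstanceMapping, and so on) straight through, which presumes the matching JobHistory columns are mapped by type handlers rather than stored as hand-serialized strings. For readers unfamiliar with the pattern named in the PR title, here is a hedged sketch of a dedicated MyBatis type handler for one such column (class name and error messages are illustrative; the generic JacksonTypeHandler shipped with MyBatis-Plus covers the common case without any custom code):

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.dinky.data.flink.checkpoint.CheckPointOverView;

// Sketch: (de)serializes one typed JSON column, so mapper callers read and
// write CheckPointOverView objects instead of raw JSON strings.
public class CheckPointOverViewTypeHandler extends BaseTypeHandler<CheckPointOverView> {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, CheckPointOverView parameter, JdbcType jdbcType)
            throws SQLException {
        try {
            ps.setString(i, MAPPER.writeValueAsString(parameter));
        } catch (JsonProcessingException e) {
            throw new SQLException("failed to serialize checkpoint overview", e);
        }
    }

    @Override
    public CheckPointOverView getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return parse(rs.getString(columnName));
    }

    @Override
    public CheckPointOverView getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return parse(rs.getString(columnIndex));
    }

    @Override
    public CheckPointOverView getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        return parse(cs.getString(columnIndex));
    }

    private CheckPointOverView parse(String json) throws SQLException {
        if (json == null) {
            return null;
        }
        try {
            return MAPPER.readValue(json, CheckPointOverView.class);
        } catch (JsonProcessingException e) {
            throw new SQLException("failed to deserialize checkpoint overview", e);
        }
    }
}

Registering a handler like this on the entity field (or globally for the target type) is what lets the builder calls above drop every JSONUtil.toJsonStr / JsonUtils.parseToJsonNode round-trip.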
dinky-admin/src/main/java/org/dinky/data/dto/StudioDDLDTO.java

@@ -65,7 +65,7 @@ public class StudioDDLDTO {
     private boolean useRemote;
 
     @ApiModelProperty(
-            value = "Cluster ID",
+            value = "ClusterInstance ID",
             dataType = "Integer",
             example = "1",
             notes = "The identifier of the cluster")