Refactor PipelineJobManager (#29207)
* Refactor PipelineJobConfigurationManager

* Refactor MigrationJobAPI

* Remove SPI from PipelineJobOption

* Remove SPI from PipelineJobOption

* Refactor PipelineJobManager

* Refactor PipelineJobManager
terrymanu authored Nov 25, 2023
1 parent 40e396d commit bf41095
Showing 2 changed files with 40 additions and 46 deletions.
@@ -58,19 +58,17 @@ public final class PipelineJobManager {
      * Start job.
      *
      * @param jobConfig job configuration
-     * @return job id
      */
-    public Optional<String> start(final PipelineJobConfiguration jobConfig) {
+    public void start(final PipelineJobConfiguration jobConfig) {
         String jobId = jobConfig.getJobId();
         ShardingSpherePreconditions.checkState(0 != jobConfig.getJobShardingCount(), () -> new PipelineJobCreationWithInvalidShardingCountException(jobId));
         PipelineGovernanceFacade governanceFacade = PipelineAPIFactory.getPipelineGovernanceFacade(PipelineJobIdUtils.parseContextKey(jobId));
         if (governanceFacade.getJobFacade().getConfiguration().isExisted(jobId)) {
             log.warn("jobId already exists in registry center, ignore, job id is `{}`", jobId);
-            return Optional.of(jobId);
+            return;
         }
         governanceFacade.getJobFacade().getJob().create(jobId, jobOption.getJobClass());
         governanceFacade.getJobFacade().getConfiguration().persist(jobId, new PipelineJobConfigurationManager(jobOption).convertToJobConfigurationPOJO(jobConfig));
-        return Optional.of(jobId);
     }
 
     /**
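Since start() no longer returns the job id, a caller now reads the id from the configuration it passed in, which is exactly the pattern the updated tests below follow. A minimal caller sketch under that assumption (the wrapper class and the log line are illustrative, not part of this commit; package paths follow the import hunk below):

import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration;
import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager;

// Hypothetical caller showing the migration from the old Optional-returning API.
final class StartJobExample {

    void startAndLog(final PipelineJobManager jobManager, final PipelineJobConfiguration jobConfig) {
        // Before this commit: Optional<String> jobId = jobManager.start(jobConfig);
        // After this commit: start() returns void; the job id already lives on the configuration.
        jobManager.start(jobConfig);
        String jobId = jobConfig.getJobId();
        System.out.println("Started pipeline job " + jobId);
    }
}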
@@ -19,6 +19,7 @@
 
 import lombok.SneakyThrows;
 import org.apache.shardingsphere.data.pipeline.api.PipelineDataSourceConfiguration;
+import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration;
 import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeEntry;
 import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine;
 import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceConfigurationFactory;
@@ -32,11 +33,11 @@
 import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult;
 import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException;
 import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils;
-import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobConfigurationManager;
-import org.apache.shardingsphere.data.pipeline.core.job.service.TransmissionJobManager;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory;
+import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobConfigurationManager;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobItemManager;
 import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineJobManager;
+import org.apache.shardingsphere.data.pipeline.core.job.service.TransmissionJobManager;
 import org.apache.shardingsphere.data.pipeline.core.metadata.PipelineDataSourcePersistService;
 import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI;
 import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobOption;
@@ -129,9 +130,9 @@ static void afterClass() {
 
     @Test
     void assertStartAndList() {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
-        JobConfigurationPOJO jobConfigPOJO = getJobConfigurationPOJO(jobId.get());
+        PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
+        JobConfigurationPOJO jobConfigPOJO = getJobConfigurationPOJO(jobConfig.getJobId());
         assertFalse(jobConfigPOJO.isDisabled());
         assertThat(jobConfigPOJO.getShardingTotalCount(), is(1));
     }
@@ -142,46 +143,43 @@ private JobConfigurationPOJO getJobConfigurationPOJO(final String jobId) {
 
     @Test
     void assertStartOrStopById() {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
-        assertFalse(getJobConfigurationPOJO(jobId.get()).isDisabled());
+        PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
+        assertFalse(getJobConfigurationPOJO(jobConfig.getJobId()).isDisabled());
         PipelineDistributedBarrier mockBarrier = mock(PipelineDistributedBarrier.class);
         when(PipelineDistributedBarrier.getInstance(any())).thenReturn(mockBarrier);
-        jobManager.stop(jobId.get());
-        assertTrue(getJobConfigurationPOJO(jobId.get()).isDisabled());
-        jobManager.startDisabledJob(jobId.get());
-        assertFalse(getJobConfigurationPOJO(jobId.get()).isDisabled());
+        jobManager.stop(jobConfig.getJobId());
+        assertTrue(getJobConfigurationPOJO(jobConfig.getJobId()).isDisabled());
+        jobManager.startDisabledJob(jobConfig.getJobId());
+        assertFalse(getJobConfigurationPOJO(jobConfig.getJobId()).isDisabled());
     }
 
     @Test
     void assertRollback() throws SQLException {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
-        MigrationJobConfiguration jobConfig = jobConfigManager.getJobConfiguration(jobId.get());
-        initTableData(jobConfig);
+        PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
+        initTableData(jobConfigManager.<MigrationJobConfiguration>getJobConfiguration(jobConfig.getJobId()));
         PipelineDistributedBarrier mockBarrier = mock(PipelineDistributedBarrier.class);
         when(PipelineDistributedBarrier.getInstance(any())).thenReturn(mockBarrier);
-        jobAPI.rollback(jobId.get());
-        assertNull(getJobConfigurationPOJO(jobId.get()));
+        jobAPI.rollback(jobConfig.getJobId());
+        assertNull(getJobConfigurationPOJO(jobConfig.getJobId()));
     }
 
     @Test
     void assertCommit() {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
-        MigrationJobConfiguration jobConfig = jobConfigManager.getJobConfiguration(jobId.get());
-        initTableData(jobConfig);
+        PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
+        initTableData(jobConfigManager.<MigrationJobConfiguration>getJobConfiguration(jobConfig.getJobId()));
         PipelineDistributedBarrier mockBarrier = mock(PipelineDistributedBarrier.class);
         when(PipelineDistributedBarrier.getInstance(any())).thenReturn(mockBarrier);
-        jobAPI.commit(jobId.get());
-        assertNull(getJobConfigurationPOJO(jobId.get()));
+        jobAPI.commit(jobConfig.getJobId());
+        assertNull(getJobConfigurationPOJO(jobConfig.getJobId()));
     }
 
     @Test
     void assertGetProgress() {
         MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
-        Optional<String> jobId = jobManager.start(jobConfig);
-        assertTrue(jobId.isPresent());
+        jobManager.start(jobConfig);
         Map<Integer, TransmissionJobItemProgress> jobProgressMap = transmissionJobManager.getJobProgress(jobConfig);
         assertThat(jobProgressMap.size(), is(1));
     }
@@ -190,10 +188,9 @@ void assertGetProgress() {
     void assertDataConsistencyCheck() {
         MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
         initTableData(jobConfig);
-        Optional<String> jobId = jobManager.start(jobConfig);
-        assertTrue(jobId.isPresent());
+        jobManager.start(jobConfig);
         Map<String, TableDataConsistencyCheckResult> checkResultMap = jobOption.buildDataConsistencyChecker(
-                jobConfig, jobOption.buildProcessContext(jobConfig), new ConsistencyCheckJobItemProgressContext(jobId.get(), 0, "H2")).check("FIXTURE", null);
+                jobConfig, jobOption.buildProcessContext(jobConfig), new ConsistencyCheckJobItemProgressContext(jobConfig.getJobId(), 0, "H2")).check("FIXTURE", null);
         assertThat(checkResultMap.size(), is(1));
         String checkKey = "t_order";
         assertTrue(checkResultMap.get(checkKey).isMatched());
@@ -202,12 +199,11 @@ void assertDataConsistencyCheck() {
 
     @Test
     void assertSwitchClusterConfigurationSucceed() {
-        final MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
-        Optional<String> jobId = jobManager.start(jobConfig);
-        assertTrue(jobId.isPresent());
+        MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
         MigrationJobItemContext jobItemContext = PipelineContextUtils.mockMigrationJobItemContext(jobConfig);
         jobItemManager.persistProgress(jobItemContext);
-        jobItemManager.updateStatus(jobId.get(), jobItemContext.getShardingItem(), JobStatus.EXECUTE_INVENTORY_TASK);
+        jobItemManager.updateStatus(jobConfig.getJobId(), jobItemContext.getShardingItem(), JobStatus.EXECUTE_INVENTORY_TASK);
         Map<Integer, TransmissionJobItemProgress> progress = transmissionJobManager.getJobProgress(jobConfig);
         for (Entry<Integer, TransmissionJobItemProgress> entry : progress.entrySet()) {
             assertThat(entry.getValue().getStatus(), is(JobStatus.EXECUTE_INVENTORY_TASK));
@@ -237,7 +233,7 @@ private void initTableData(final DataSource pipelineDataSource) throws SQLExcept
 
     @Test
     void assertRenewJobStatus() {
-        final MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
         MigrationJobItemContext jobItemContext = PipelineContextUtils.mockMigrationJobItemContext(jobConfig);
         jobItemManager.persistProgress(jobItemContext);
         jobItemManager.updateStatus(jobConfig.getJobId(), 0, JobStatus.FINISHED);
@@ -306,13 +302,13 @@ void assertShowMigrationSourceResources() {
 
     @Test
     void assertGetJobItemInfosAtBegin() {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
+        MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
         YamlTransmissionJobItemProgress yamlJobItemProgress = new YamlTransmissionJobItemProgress();
         yamlJobItemProgress.setStatus(JobStatus.RUNNING.name());
         yamlJobItemProgress.setSourceDatabaseType("MySQL");
-        PipelineAPIFactory.getPipelineGovernanceFacade(PipelineContextUtils.getContextKey()).getJobItemFacade().getProcess().persist(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress));
-        List<TransmissionJobItemInfo> jobItemInfos = transmissionJobManager.getJobItemInfos(jobId.get());
+        PipelineAPIFactory.getPipelineGovernanceFacade(PipelineContextUtils.getContextKey()).getJobItemFacade().getProcess().persist(jobConfig.getJobId(), 0, YamlEngine.marshal(yamlJobItemProgress));
+        List<TransmissionJobItemInfo> jobItemInfos = transmissionJobManager.getJobItemInfos(jobConfig.getJobId());
         assertThat(jobItemInfos.size(), is(1));
         TransmissionJobItemInfo jobItemInfo = jobItemInfos.get(0);
         assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.RUNNING));
@@ -321,15 +317,15 @@ void assertGetJobItemInfosAtBegin() {
 
     @Test
     void assertGetJobItemInfosAtIncrementTask() {
-        Optional<String> jobId = jobManager.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(jobId.isPresent());
+        MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
+        jobManager.start(jobConfig);
         YamlTransmissionJobItemProgress yamlJobItemProgress = new YamlTransmissionJobItemProgress();
         yamlJobItemProgress.setSourceDatabaseType("MySQL");
         yamlJobItemProgress.setStatus(JobStatus.EXECUTE_INCREMENTAL_TASK.name());
         yamlJobItemProgress.setProcessedRecordsCount(100);
         yamlJobItemProgress.setInventoryRecordsCount(50);
-        PipelineAPIFactory.getPipelineGovernanceFacade(PipelineContextUtils.getContextKey()).getJobItemFacade().getProcess().persist(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress));
-        List<TransmissionJobItemInfo> jobItemInfos = transmissionJobManager.getJobItemInfos(jobId.get());
+        PipelineAPIFactory.getPipelineGovernanceFacade(PipelineContextUtils.getContextKey()).getJobItemFacade().getProcess().persist(jobConfig.getJobId(), 0, YamlEngine.marshal(yamlJobItemProgress));
+        List<TransmissionJobItemInfo> jobItemInfos = transmissionJobManager.getJobItemInfos(jobConfig.getJobId());
         TransmissionJobItemInfo jobItemInfo = jobItemInfos.get(0);
         assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.EXECUTE_INCREMENTAL_TASK));
         assertThat(jobItemInfo.getInventoryFinishedPercentage(), is(100));
