This is an automated email from the ASF dual-hosted git repository.

zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 29a74c879ba Refactor job type declarations to use generics for improved type safety (#37173)
29a74c879ba is described below

commit 29a74c879baf94354f06f198de0086170a862dd0
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Nov 23 21:25:43 2025 +0800

    Refactor job type declarations to use generics for improved type safety (#37173)
---
 .../impl/ConditionValueInOperatorGeneratorTest.java          |  5 +++--
 .../core/job/executor/DistributedPipelineJobExecutor.java    |  4 ++--
 .../data/pipeline/core/job/id/PipelineJobId.java             |  2 +-
 .../listener/PipelineContextManagerLifecycleListener.java    |  2 +-
 .../pipeline/core/task/runner/TransmissionTasksRunner.java   |  2 +-
 .../update/AlterTransmissionRuleExecutorTest.java            |  4 ++--
 .../org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java  |  2 +-
 .../apache/shardingsphere/data/pipeline/cdc/CDCJobId.java    |  3 +--
 .../scenario/consistencycheck/ConsistencyCheckJobId.java     |  3 +--
 .../consistencycheck/task/ConsistencyCheckTasksRunner.java   |  2 +-
 .../task/ConsistencyCheckTasksRunnerTest.java                |  8 ++++----
 .../data/pipeline/scenario/migration/MigrationJobId.java     |  3 +--
 .../mode/repository/cluster/etcd/EtcdRepositoryTest.java     |  3 +++
 .../visitor/statement/PostgreSQLStatementVisitor.java        |  4 ++--
 .../command/query/simple/OpenGaussComQueryExecutorTest.java  |  4 ++--
 .../operation/pipeline/cases/PipelineContainerComposer.java  | 12 ++++++------
 .../pipeline/scenario/migration/api/MigrationJobAPITest.java |  3 +--
 17 files changed, 33 insertions(+), 33 deletions(-)
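
The whole change follows one pattern: declarations that used the generic PipelineJobType interface as a raw type now carry a type argument, either the unbounded wildcard PipelineJobType<?> where the concrete job kind is not known, or the concrete type (CDCJobType, ConsistencyCheckJobType, MigrationJobType) where it is. A minimal sketch of that shape, using simplified stand-in types rather than the real ShardingSphere classes:

    // Illustrative stand-ins only; not the real ShardingSphere interfaces.
    public final class GenericJobTypeSketch {
        
        public interface JobConfiguration {
        }
        
        public static final class MigrationJobConfiguration implements JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
            
            T swapToObject(String jobParameter);
        }
        
        @SuppressWarnings("rawtypes")
        private JobType rawJobType;                               // before: raw type, unchecked at every use
        
        private JobType<?> wildcardJobType;                       // after: unknown but still checked type argument
        
        private JobType<MigrationJobConfiguration> migrationType; // after: concrete argument where it is known
    }

The wildcard costs nothing at the call sites in the hunks below because they never need the concrete configuration type; where one does (CDCJob, the migration tests), the concrete parameterization is used instead.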

diff --git 
a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java
 
b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java
index 2f274de71c6..6c193daa238 100644
--- 
a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java
+++ 
b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java
@@ -52,6 +52,7 @@ class ConditionValueInOperatorGeneratorTest {
     
     private final TimestampServiceRule timestampServiceRule = new 
TimestampServiceRule(new TimestampServiceRuleConfiguration("System", new 
Properties()));
     
+    @SuppressWarnings("UseOfObsoleteDateTimeApi")
     @Test
     void assertNowExpression() {
         ListExpression listExpression = new ListExpression(0, 0);
@@ -71,7 +72,7 @@ class ConditionValueInOperatorGeneratorTest {
         InExpression inExpression = new InExpression(0, 0, null, 
listExpression, false);
         Optional<ShardingConditionValue> shardingConditionValue = 
generator.generate(inExpression, column, new LinkedList<>(), 
timestampServiceRule);
         assertTrue(shardingConditionValue.isPresent());
-        assertThat(((ListShardingConditionValue) 
shardingConditionValue.get()).getValues(), is(Arrays.asList(null, null)));
+        assertThat(((ListShardingConditionValue<?>) 
shardingConditionValue.get()).getValues(), is(Arrays.asList(null, null)));
         
assertTrue(shardingConditionValue.get().getParameterMarkerIndexes().isEmpty());
         assertThat(shardingConditionValue.get().toString(), is("tbl.id in 
(,)"));
     }
@@ -86,7 +87,7 @@ class ConditionValueInOperatorGeneratorTest {
         InExpression inExpression = new InExpression(0, 0, null, 
listExpression, false);
         Optional<ShardingConditionValue> shardingConditionValue = 
generator.generate(inExpression, column, new LinkedList<>(), 
timestampServiceRule);
         assertTrue(shardingConditionValue.isPresent());
-        assertThat(((ListShardingConditionValue) 
shardingConditionValue.get()).getValues(), is(Arrays.asList("test1", null, 
null, "test2")));
+        assertThat(((ListShardingConditionValue<?>) 
shardingConditionValue.get()).getValues(), is(Arrays.asList("test1", null, 
null, "test2")));
         
assertTrue(shardingConditionValue.get().getParameterMarkerIndexes().isEmpty());
         assertThat(shardingConditionValue.get().toString(), is("tbl.id in 
(test1,,,test2)"));
     }
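
In the two assertions above, only the cast target changes, from the raw ListShardingConditionValue to ListShardingConditionValue<?>: getValues() then yields a checked List<?> instead of a raw List, so the rawtypes warning disappears while the assertion itself stays the same. A standalone sketch of the difference, using simplified stand-ins rather than the real sharding classes:

    import java.util.Arrays;
    import java.util.List;
    
    public final class WildcardCastSketch {
        
        public interface ShardingConditionValue {
        }
        
        public static final class ListShardingConditionValue<T> implements ShardingConditionValue {
            
            private final List<T> values;
            
            public ListShardingConditionValue(final List<T> values) {
                this.values = values;
            }
            
            public List<T> getValues() {
                return values;
            }
        }
        
        public static void main(final String[] args) {
            ShardingConditionValue conditionValue = new ListShardingConditionValue<>(Arrays.asList("test1", null, null, "test2"));
            // Raw cast (before): getValues() is erased to a raw List and the compiler warns about it.
            // Wildcard cast (after): no raw type involved; the result is a checked List<?>.
            List<?> values = ((ListShardingConditionValue<?>) conditionValue).getValues();
            System.out.println(values);
        }
    }
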
diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/executor/DistributedPipelineJobExecutor.java
 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/executor/DistributedPipelineJobExecutor.java
index 807598a9817..92064ff21df 100644
--- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/executor/DistributedPipelineJobExecutor.java
+++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/executor/DistributedPipelineJobExecutor.java
@@ -68,7 +68,7 @@ public final class DistributedPipelineJobExecutor {
             log.info("Job is stopping, ignore.");
             return;
         }
-        PipelineJobType jobType = PipelineJobIdUtils.parseJobType(jobId);
+        PipelineJobType<?> jobType = PipelineJobIdUtils.parseJobType(jobId);
         PipelineContextKey contextKey = 
PipelineJobIdUtils.parseContextKey(jobId);
         PipelineJobConfiguration jobConfig = 
jobType.getOption().getYamlJobConfigurationSwapper().swapToObject(shardingContext.getJobParameter());
         PipelineJobItemManager<PipelineJobItemProgress> jobItemManager = new 
PipelineJobItemManager<>(jobType.getOption().getYamlJobItemProgressSwapper());
@@ -111,7 +111,7 @@ public final class DistributedPipelineJobExecutor {
         return true;
     }
     
-    private TransmissionProcessContext createTransmissionProcessContext(final 
String jobId, final PipelineJobType jobType, final PipelineContextKey 
contextKey) {
+    private TransmissionProcessContext createTransmissionProcessContext(final 
String jobId, final PipelineJobType<?> jobType, final PipelineContextKey 
contextKey) {
         if (!jobType.getOption().isTransmissionJob()) {
             return null;
         }
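
Because the type parameter of PipelineJobType is bounded by the job configuration type, a PipelineJobType<?> returned by PipelineJobIdUtils.parseJobType() can still hand back its configuration as the base PipelineJobConfiguration without a cast; only scenario code that needs the concrete configuration (CDCJob further down) adds a downcast. A sketch of why the wildcard is sufficient here, with simplified stand-ins (the real code goes through the job option and a YAML swapper):

    public final class WildcardUsageSketch {
        
        public interface JobConfiguration {
        }
        
        public static final class CDCJobConfiguration implements JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
            
            T swapToObject(String jobParameter);
        }
        
        public static void execute(final JobType<?> jobType, final String jobParameter) {
            // The captured wildcard is bounded by JobConfiguration, so no cast is needed here.
            JobConfiguration jobConfig = jobType.swapToObject(jobParameter);
            // A scenario that knows its concrete configuration downcasts explicitly, as CDCJob does.
            CDCJobConfiguration cdcJobConfig = (CDCJobConfiguration) jobType.swapToObject(jobParameter);
            System.out.println(jobConfig + ", " + cdcJobConfig);
        }
    }
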
diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/id/PipelineJobId.java
 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/id/PipelineJobId.java
index 5cf1b7d8c14..ee89eca3158 100644
--- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/id/PipelineJobId.java
+++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/id/PipelineJobId.java
@@ -36,7 +36,7 @@ public interface PipelineJobId {
      *
      * @return pipeline job type
      */
-    PipelineJobType getJobType();
+    PipelineJobType<?> getJobType();
     
     /**
      * Get pipeline context key.
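
Returning PipelineJobType<?> is the loosest contract callers of the interface need, and it lets each PipelineJobId implementation narrow the return type covariantly to its concrete job type, which is what CDCJobId, ConsistencyCheckJobId and MigrationJobId do below by typing their jobType fields concretely (Lombok's @Getter then generates the covariant getter). A sketch of that covariance with simplified names:

    public final class CovariantJobTypeSketch {
        
        public interface JobConfiguration {
        }
        
        public static final class MigrationJobConfiguration implements JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
        }
        
        public static final class MigrationJobType implements JobType<MigrationJobConfiguration> {
        }
        
        public interface JobId {
            
            JobType<?> getJobType();
        }
        
        public static final class MigrationJobId implements JobId {
            
            // Covariant override: JobId callers see JobType<?>, migration code keeps the concrete type.
            @Override
            public MigrationJobType getJobType() {
                return new MigrationJobType();
            }
        }
    }
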
diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/PipelineContextManagerLifecycleListener.java
 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/PipelineContextManagerLifecycleListener.java
index 609adff8fd4..f4196932d26 100644
--- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/PipelineContextManagerLifecycleListener.java
+++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/PipelineContextManagerLifecycleListener.java
@@ -71,7 +71,7 @@ public final class PipelineContextManagerLifecycleListener 
implements ContextMan
                 .stream().filter(each -> 
!each.getJobName().startsWith("_")).collect(Collectors.toList());
         log.info("All job names: {}", 
allJobsBriefInfo.stream().map(JobBriefInfo::getJobName).collect(Collectors.joining(",")));
         for (JobBriefInfo each : allJobsBriefInfo) {
-            PipelineJobType jobType;
+            PipelineJobType<?> jobType;
             try {
                 jobType = PipelineJobIdUtils.parseJobType(each.getJobName());
             } catch (final IllegalArgumentException ex) {
diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/TransmissionTasksRunner.java
 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/TransmissionTasksRunner.java
index 87994fb0aa0..8335d52fa64 100644
--- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/TransmissionTasksRunner.java
+++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/TransmissionTasksRunner.java
@@ -54,7 +54,7 @@ public final class TransmissionTasksRunner implements 
PipelineTasksRunner {
     
     private final Collection<PipelineTask> incrementalTasks;
     
-    private final PipelineJobType jobType;
+    private final PipelineJobType<?> jobType;
     
     private final PipelineJobItemManager<TransmissionJobItemProgress> 
jobItemManager;
     
diff --git 
a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/data/pipeline/distsql/handler/transmission/update/AlterTransmissionRuleExecutorTest.java
 
b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/data/pipeline/distsql/handler/transmission/update/AlterTransmissionRuleExecutorTest.java
index 1653f2c66ee..3da9e973157 100644
--- 
a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/data/pipeline/distsql/handler/transmission/update/AlterTransmissionRuleExecutorTest.java
+++ 
b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/data/pipeline/distsql/handler/transmission/update/AlterTransmissionRuleExecutorTest.java
@@ -72,7 +72,7 @@ class AlterTransmissionRuleExecutorTest {
     
     @Test
     void assertExecuteUpdate() {
-        PipelineJobType jobType = mock(PipelineJobType.class);
+        PipelineJobType<?> jobType = mock(PipelineJobType.class);
         when(jobType.getType()).thenReturn(JOB_TYPE);
         TransmissionRuleSegment segment = new TransmissionRuleSegment();
         segment.setReadSegment(new ReadOrWriteSegment(5, 1000, 200, new 
AlgorithmSegment("READ_LIMITER", PropertiesBuilder.build(new Property("qps", 
"50")))));
@@ -102,7 +102,7 @@ class AlterTransmissionRuleExecutorTest {
     
     @Test
     void assertExecuteUpdatePersistWhenStreamChannelIsNull() {
-        PipelineJobType jobType = mock(PipelineJobType.class);
+        PipelineJobType<?> jobType = mock(PipelineJobType.class);
         when(jobType.getType()).thenReturn(JOB_TYPE);
         AlterTransmissionRuleStatement sqlStatement = new 
AlterTransmissionRuleStatement(JOB_TYPE, new TransmissionRuleSegment());
         try (MockedStatic<TypedSPILoader> mockedStatic = 
mockStatic(TypedSPILoader.class)) {
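
mock(PipelineJobType.class) necessarily produces a raw-typed mock, but assigning it to a wildcard-typed variable is a warning-free conversion, so these declarations need no suppression at all; only tests that assign the mock to a concretely parameterized variable (ConsistencyCheckTasksRunnerTest below) keep @SuppressWarnings("unchecked"), and "rawtypes" can be dropped everywhere. A sketch of both cases, assuming Mockito on the classpath and using simplified stand-in interfaces:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;
    
    public final class GenericMockSketch {
        
        public interface JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
            
            String getType();
        }
        
        public void wildcardMock() {
            // Raw mock assigned to a wildcard type: no unchecked warning, no suppression needed.
            JobType<?> jobType = mock(JobType.class);
            when(jobType.getType()).thenReturn("STREAMING");
        }
        
        @SuppressWarnings("unchecked")
        public void parameterizedMock() {
            // A concrete type argument cannot be proven from mock(Class), so "unchecked" stays,
            // but the declaration itself is no longer a raw type.
            JobType<JobConfiguration> jobType = mock(JobType.class);
            when(jobType.getType()).thenReturn("CONSISTENCY_CHECK");
        }
    }
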
diff --git 
a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
 
b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
index 052d7086c43..c5c850fb096 100644
--- 
a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
+++ 
b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJob.java
@@ -101,7 +101,7 @@ public final class CDCJob implements PipelineJob {
     public void execute(final ShardingContext shardingContext) {
         String jobId = shardingContext.getJobName();
         log.info("Execute job {}", jobId);
-        PipelineJobType jobType = PipelineJobIdUtils.parseJobType(jobId);
+        PipelineJobType<?> jobType = PipelineJobIdUtils.parseJobType(jobId);
         PipelineContextKey contextKey = 
PipelineJobIdUtils.parseContextKey(jobId);
         CDCJobConfiguration jobConfig = (CDCJobConfiguration) 
jobType.getOption().getYamlJobConfigurationSwapper().swapToObject(shardingContext.getJobParameter());
         PipelineJobItemManager<TransmissionJobItemProgress> jobItemManager = 
new 
PipelineJobItemManager<>(jobType.getOption().getYamlJobItemProgressSwapper());
diff --git 
a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJobId.java
 
b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJobId.java
index c86734b2c2c..d2b586125fa 100644
--- 
a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJobId.java
+++ 
b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/CDCJobId.java
@@ -22,7 +22,6 @@ import lombok.RequiredArgsConstructor;
 import org.apache.shardingsphere.data.pipeline.cdc.constant.CDCSinkType;
 import org.apache.shardingsphere.data.pipeline.core.context.PipelineContextKey;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobId;
-import org.apache.shardingsphere.data.pipeline.core.job.type.PipelineJobType;
 
 import java.util.List;
 
@@ -33,7 +32,7 @@ import java.util.List;
 @Getter
 public final class CDCJobId implements PipelineJobId {
     
-    private final PipelineJobType jobType = new CDCJobType();
+    private final CDCJobType jobType = new CDCJobType();
     
     private final PipelineContextKey contextKey;
     
diff --git 
a/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobId.java
 
b/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobId.java
index 96236921003..4d28f1176a0 100644
--- 
a/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobId.java
+++ 
b/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobId.java
@@ -20,7 +20,6 @@ package 
org.apache.shardingsphere.data.pipeline.scenario.consistencycheck;
 import lombok.Getter;
 import org.apache.shardingsphere.data.pipeline.core.context.PipelineContextKey;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobId;
-import org.apache.shardingsphere.data.pipeline.core.job.type.PipelineJobType;
 import 
org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.util.ConsistencyCheckSequence;
 
 /**
@@ -29,7 +28,7 @@ import 
org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.util.Co
 @Getter
 public final class ConsistencyCheckJobId implements PipelineJobId {
     
-    private final PipelineJobType jobType = new ConsistencyCheckJobType();
+    private final ConsistencyCheckJobType jobType = new 
ConsistencyCheckJobType();
     
     private final PipelineContextKey contextKey;
     
diff --git 
a/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java
 
b/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java
index 0e4fc7c1a2d..b45a54b5b55 100644
--- 
a/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java
+++ 
b/kernel/data-pipeline/scenario/consistency-check/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java
@@ -55,7 +55,7 @@ import java.util.concurrent.atomic.AtomicReference;
 @Slf4j
 public final class ConsistencyCheckTasksRunner implements PipelineTasksRunner {
     
-    private final PipelineJobType jobType = new ConsistencyCheckJobType();
+    private final ConsistencyCheckJobType jobType = new 
ConsistencyCheckJobType();
     
     private final PipelineJobManager jobManager = new 
PipelineJobManager(jobType);
     
diff --git 
a/kernel/data-pipeline/scenario/consistency-check/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunnerTest.java
 
b/kernel/data-pipeline/scenario/consistency-check/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunnerTest.java
index 77586e173d3..669115c09a4 100644
--- 
a/kernel/data-pipeline/scenario/consistency-check/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunnerTest.java
+++ 
b/kernel/data-pipeline/scenario/consistency-check/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunnerTest.java
@@ -137,7 +137,7 @@ class ConsistencyCheckTasksRunnerTest {
         verify(checkExecutor).stop();
     }
     
-    @SuppressWarnings({"unchecked", "rawtypes"})
+    @SuppressWarnings("unchecked")
     @Test
     void assertRunBlockingPersistResultWhenNotStopping() throws 
ReflectiveOperationException {
         ConsistencyCheckJobItemContext jobItemContext = createJobItemContext();
@@ -147,7 +147,7 @@ class ConsistencyCheckTasksRunnerTest {
         PipelineProcessConfigurationPersistService processConfigPersistService 
= mock(PipelineProcessConfigurationPersistService.class);
         
Plugins.getMemberAccessor().set(ConsistencyCheckTasksRunner.class.getDeclaredField("processConfigPersistService"),
 runner, processConfigPersistService);
         PipelineProcessConfiguration processConfig = new 
PipelineProcessConfiguration(new PipelineReadConfiguration(1, 1, 1, null), new 
PipelineWriteConfiguration(1, 1, null), null);
-        PipelineJobType parentJobType = mock(PipelineJobType.class);
+        PipelineJobType<PipelineJobConfiguration> parentJobType = 
mock(PipelineJobType.class);
         when(parentJobType.getType()).thenReturn("CONSISTENCY_CHECK");
         
when(PipelineJobIdUtils.parseJobType(PARENT_JOB_ID)).thenReturn(parentJobType);
         PipelineJobConfiguration parentJobConfig = 
mock(PipelineJobConfiguration.class);
@@ -178,7 +178,7 @@ class ConsistencyCheckTasksRunnerTest {
         }
     }
     
-    @SuppressWarnings({"unchecked", "rawtypes"})
+    @SuppressWarnings("unchecked")
     @Test
     void assertRunBlockingSkipPersistWhenStopping() throws 
ReflectiveOperationException {
         ConsistencyCheckJobItemContext jobItemContext = createJobItemContext();
@@ -191,7 +191,7 @@ class ConsistencyCheckTasksRunnerTest {
         PipelineProcessConfiguration processConfig = new 
PipelineProcessConfiguration(new PipelineReadConfiguration(1, 1, 1, null),
                 new PipelineWriteConfiguration(1, 1, null), null);
         PipelineDataConsistencyChecker checker = 
mock(PipelineDataConsistencyChecker.class);
-        PipelineJobType parentJobType = mock(PipelineJobType.class);
+        PipelineJobType<PipelineJobConfiguration> parentJobType = 
mock(PipelineJobType.class);
         when(parentJobType.getType()).thenReturn("CONSISTENCY_CHECK");
         
when(PipelineJobIdUtils.parseJobType(PARENT_JOB_ID)).thenReturn(parentJobType);
         PipelineJobConfiguration parentJobConfig = 
mock(PipelineJobConfiguration.class);
diff --git 
a/kernel/data-pipeline/scenario/migration/core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobId.java
 
b/kernel/data-pipeline/scenario/migration/core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobId.java
index 8fe43d4f1ed..1c55c5d7583 100644
--- 
a/kernel/data-pipeline/scenario/migration/core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobId.java
+++ 
b/kernel/data-pipeline/scenario/migration/core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobId.java
@@ -21,7 +21,6 @@ import lombok.Getter;
 import lombok.RequiredArgsConstructor;
 import org.apache.shardingsphere.data.pipeline.core.context.PipelineContextKey;
 import org.apache.shardingsphere.data.pipeline.core.job.id.PipelineJobId;
-import org.apache.shardingsphere.data.pipeline.core.job.type.PipelineJobType;
 
 import java.util.List;
 
@@ -32,7 +31,7 @@ import java.util.List;
 @Getter
 public final class MigrationJobId implements PipelineJobId {
     
-    private final PipelineJobType jobType = new MigrationJobType();
+    private final MigrationJobType jobType = new MigrationJobType();
     
     private final PipelineContextKey contextKey;
     
diff --git 
a/mode/type/cluster/repository/provider/etcd/src/test/java/org/apache/shardingsphere/mode/repository/cluster/etcd/EtcdRepositoryTest.java
 
b/mode/type/cluster/repository/provider/etcd/src/test/java/org/apache/shardingsphere/mode/repository/cluster/etcd/EtcdRepositoryTest.java
index dd87ae1db1a..0e426dea4be 100644
--- 
a/mode/type/cluster/repository/provider/etcd/src/test/java/org/apache/shardingsphere/mode/repository/cluster/etcd/EtcdRepositoryTest.java
+++ 
b/mode/type/cluster/repository/provider/etcd/src/test/java/org/apache/shardingsphere/mode/repository/cluster/etcd/EtcdRepositoryTest.java
@@ -82,9 +82,11 @@ class EtcdRepositoryTest {
     @Mock
     private Lease lease;
     
+    @SuppressWarnings("rawtypes")
     @Mock
     private CompletableFuture getFuture;
     
+    @SuppressWarnings("rawtypes")
     @Mock
     private CompletableFuture leaseFuture;
     
@@ -94,6 +96,7 @@ class EtcdRepositoryTest {
     @Mock
     private GetResponse getResponse;
     
+    @SuppressWarnings("rawtypes")
     @Mock
     private CompletableFuture putFuture;
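
Adding @SuppressWarnings("rawtypes") to the individual CompletableFuture mock fields keeps the suppression as narrow as possible: the rest of the test class still gets raw-type warnings if new ones creep in. A sketch of the field-scoped pattern, assuming JUnit 5 with the Mockito extension:

    import java.util.concurrent.CompletableFuture;
    
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Mock;
    import org.mockito.junit.jupiter.MockitoExtension;
    
    @ExtendWith(MockitoExtension.class)
    class ScopedSuppressionSketchTest {
        
        // Suppression scoped to the one field that genuinely needs the raw type.
        @SuppressWarnings("rawtypes")
        @Mock
        private CompletableFuture getFuture;
        
        // Unrelated fields stay unsuppressed, so new raw types elsewhere still warn.
        @Mock
        private Runnable unrelatedMock;
    }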
     
diff --git 
a/parser/sql/engine/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/engine/postgresql/visitor/statement/PostgreSQLStatementVisitor.java
 
b/parser/sql/engine/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/engine/postgresql/visitor/statement/PostgreSQLStatementVisitor.java
index 7ef2e8bbf9e..baa911fc8a3 100644
--- 
a/parser/sql/engine/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/engine/postgresql/visitor/statement/PostgreSQLStatementVisitor.java
+++ 
b/parser/sql/engine/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/engine/postgresql/visitor/statement/PostgreSQLStatementVisitor.java
@@ -730,7 +730,7 @@ public abstract class PostgreSQLStatementVisitor extends 
PostgreSQLStatementPars
         return result;
     }
     
-    @SuppressWarnings({"rawtypes", "unchecked"})
+    @SuppressWarnings("unchecked")
     @Override
     public ASTNode visitQualifiedNameList(final QualifiedNameListContext ctx) {
         CollectionValue<SimpleTableSegment> result = new CollectionValue<>();
@@ -738,7 +738,7 @@ public abstract class PostgreSQLStatementVisitor extends 
PostgreSQLStatementPars
             result.getValue().add((SimpleTableSegment) 
visit(ctx.qualifiedName()));
         }
         if (null != ctx.qualifiedNameList()) {
-            result.combine((CollectionValue) visit(ctx.qualifiedNameList()));
+            result.combine((CollectionValue<SimpleTableSegment>) 
visit(ctx.qualifiedNameList()));
         }
         return result;
     }
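
With the element type on the cast, result.combine(...) becomes a fully checked call; only the cast itself remains unchecked, which the method's existing @SuppressWarnings("unchecked") already covers once "rawtypes" is removed. A standalone sketch with a simplified CollectionValue stand-in:

    import java.util.LinkedList;
    import java.util.List;
    
    public final class CollectionValueCastSketch {
        
        public static final class CollectionValue<T> {
            
            private final List<T> value = new LinkedList<>();
            
            public List<T> getValue() {
                return value;
            }
            
            public void combine(final CollectionValue<T> other) {
                value.addAll(other.getValue());
            }
        }
        
        @SuppressWarnings("unchecked")
        public static CollectionValue<String> merge(final CollectionValue<String> result, final Object visited) {
            // Before: ((CollectionValue) visited) made combine() a raw, unchecked call.
            // After: the cast supplies the type argument; only the cast itself is unchecked.
            result.combine((CollectionValue<String>) visited);
            return result;
        }
    }
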
diff --git 
a/proxy/frontend/dialect/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/simple/OpenGaussComQueryExecutorTest.java
 
b/proxy/frontend/dialect/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/simple/OpenGaussComQueryExecutorTest.java
index 14df5362ff7..a82692e1ce7 100644
--- 
a/proxy/frontend/dialect/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/simple/OpenGaussComQueryExecutorTest.java
+++ 
b/proxy/frontend/dialect/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/simple/OpenGaussComQueryExecutorTest.java
@@ -82,7 +82,7 @@ class OpenGaussComQueryExecutorTest {
     void assertExecuteQueryAndReturnEmptyResult() throws SQLException {
         QueryResponseHeader queryResponseHeader = 
mock(QueryResponseHeader.class);
         when(proxyBackendHandler.execute()).thenReturn(queryResponseHeader);
-        Collection actual = queryExecutor.execute();
+        Collection<DatabasePacket> actual = queryExecutor.execute();
         assertThat(actual.size(), is(1));
         assertThat(actual.iterator().next(), 
is(isA(PostgreSQLRowDescriptionPacket.class)));
         assertThat(queryExecutor.getResponseType(), is(ResponseType.QUERY));
@@ -94,7 +94,7 @@ class OpenGaussComQueryExecutorTest {
         QueryResponseHeader queryResponseHeader = 
mock(QueryResponseHeader.class);
         
when(queryResponseHeader.getQueryHeaders()).thenReturn(Collections.singletonList(new
 QueryHeader("schema", "table", "label", "column", 1, "type", 2, 3, true, true, 
true, true)));
         when(proxyBackendHandler.execute()).thenReturn(queryResponseHeader);
-        Collection actual = queryExecutor.execute();
+        Collection<DatabasePacket> actual = queryExecutor.execute();
         assertThat(actual.size(), is(1));
         assertThat(actual.iterator().next(), 
is(isA(PostgreSQLRowDescriptionPacket.class)));
         assertThat(queryExecutor.getResponseType(), is(ResponseType.QUERY));
diff --git 
a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/PipelineContainerComposer.java
 
b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/PipelineContainerComposer.java
index 508a6fe03de..2f4d7cc278f 100644
--- 
a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/PipelineContainerComposer.java
+++ 
b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/operation/pipeline/cases/PipelineContainerComposer.java
@@ -120,7 +120,7 @@ public final class PipelineContainerComposer implements 
AutoCloseable {
     
     private Thread increaseTaskThread;
     
-    public PipelineContainerComposer(final PipelineTestParameter testParam, 
final PipelineJobType jobType) {
+    public PipelineContainerComposer(final PipelineTestParameter testParam, 
final PipelineJobType<?> jobType) {
         databaseType = testParam.getDatabaseType();
         Type type = 
E2ETestEnvironment.getInstance().getRunEnvironment().getType();
         containerComposer = Type.DOCKER == type
@@ -142,7 +142,7 @@ public final class PipelineContainerComposer implements 
AutoCloseable {
     }
     
     @SneakyThrows(SQLException.class)
-    private void init(final PipelineJobType jobType) {
+    private void init(final PipelineJobType<?> jobType) {
         String jdbcUrl = containerComposer.getProxyJdbcUrl(databaseType 
instanceof PostgreSQLDatabaseType || databaseType instanceof 
OpenGaussDatabaseType ? "postgres" : "");
         try (Connection connection = DriverManager.getConnection(jdbcUrl, 
ProxyContainerConstants.USER, ProxyContainerConstants.PASSWORD)) {
             cleanUpPipelineJobs(connection, jobType);
@@ -154,7 +154,7 @@ public final class PipelineContainerComposer implements 
AutoCloseable {
         cleanUpDataSource();
     }
     
-    private void cleanUpPipelineJobs(final Connection connection, final 
PipelineJobType jobType) throws SQLException {
+    private void cleanUpPipelineJobs(final Connection connection, final 
PipelineJobType<?> jobType) throws SQLException {
         if (Type.NATIVE != 
E2ETestEnvironment.getInstance().getRunEnvironment().getType()) {
             return;
         }
@@ -170,18 +170,18 @@ public final class PipelineContainerComposer implements 
AutoCloseable {
         }
     }
     
-    private String getOperationType(final PipelineJobType jobType, final 
String status) {
+    private String getOperationType(final PipelineJobType<?> jobType, final 
String status) {
         if (JobStatus.FINISHED.name().equals(status)) {
             return isSupportCommit(jobType) ? "COMMIT" : "DROP";
         }
         return isSupportRollback(jobType) ? "ROLLBACK" : "DROP";
     }
     
-    private boolean isSupportCommit(final PipelineJobType jobType) {
+    private boolean isSupportCommit(final PipelineJobType<?> jobType) {
         return !(jobType instanceof CDCJobType);
     }
     
-    private boolean isSupportRollback(final PipelineJobType jobType) {
+    private boolean isSupportRollback(final PipelineJobType<?> jobType) {
         return !(jobType instanceof CDCJobType);
     }
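
Widening these helper parameters to PipelineJobType<?> costs nothing: none of them touch the configuration type parameter, and instanceof CDCJobType works on a wildcard-typed reference exactly as it did on the raw type. A small sketch, again with stand-in types:

    public final class WildcardParameterSketch {
        
        public interface JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
        }
        
        public static final class CDCJobType implements JobType<JobConfiguration> {
        }
        
        // The parameter never uses T, so the unbounded wildcard is the most permissive checked signature.
        public static boolean isSupportCommit(final JobType<?> jobType) {
            return !(jobType instanceof CDCJobType);
        }
    }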
     
diff --git 
a/test/it/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPITest.java
 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPITest.java
index 139d4f2b181..c057f1090f1 100644
--- 
a/test/it/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPITest.java
+++ 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPITest.java
@@ -94,7 +94,7 @@ import static org.mockito.Mockito.when;
 @StaticMockSettings(PipelineDistributedBarrier.class)
 class MigrationJobAPITest {
     
-    private static PipelineJobType jobType;
+    private static PipelineJobType<MigrationJobConfiguration> jobType;
     
     private static MigrationJobAPI jobAPI;
     
@@ -187,7 +187,6 @@ class MigrationJobAPITest {
         assertThat(jobProgressMap.size(), is(1));
     }
     
-    @SuppressWarnings("unchecked")
     @Test
     void assertDataConsistencyCheck() {
         MigrationJobConfiguration jobConfig = 
JobConfigurationBuilder.createJobConfiguration();
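
Typing the shared test field as PipelineJobType<MigrationJobConfiguration> propagates the type argument to everything returned through it, which is presumably why the @SuppressWarnings("unchecked") on assertDataConsistencyCheck could be removed. A generic sketch of the effect; listConfigurations() is a hypothetical method used for illustration, not part of the real PipelineJobType API:

    import java.util.List;
    
    public final class ParameterizedReceiverSketch {
        
        public interface JobConfiguration {
        }
        
        public static final class MigrationJobConfiguration implements JobConfiguration {
        }
        
        public interface JobType<T extends JobConfiguration> {
            
            List<T> listConfigurations();
        }
        
        @SuppressWarnings({"unchecked", "rawtypes"})
        public static List<MigrationJobConfiguration> before(final JobType rawJobType) {
            // Raw receiver: the return type is erased to a raw List, so this assignment is unchecked.
            return rawJobType.listConfigurations();
        }
        
        public static List<MigrationJobConfiguration> after(final JobType<MigrationJobConfiguration> jobType) {
            // Parameterized receiver: the return type is checked, no suppression needed.
            return jobType.listConfigurations();
        }
    }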
