Console Output

Skipping 924 KB..
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
wait process dm-worker1 exit...
process dm-worker1 already exit
worker1 was killed
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test2"
[Thu May 16 15:19:57 CST 2024] <<<<<< finish DM-034 optimistic >>>>>>
check diff failed 2-th time, retry later
got=2 expected=2
wait process dm-master.test exit...
process dm-master.test already exit
[Thu May 16 15:19:58 CST 2024] <<<<<< start DM-035 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl1_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 3-th time, retry later
run tidb sql failed 1-th time, retry later
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:01 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check diff successfully
restart dm-worker1
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:02 CST 2024] <<<<<< finish DM-035 optimistic >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process worker1 exit...
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:20:03 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process worker1 exit...
process worker1 already exit
[Thu May 16 15:20:03 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/full_mode/source1.yaml"
[Thu May 16 15:20:04 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
restart dm-worker2
wait process dm-master.test exit...
process dm-master.test already exit
wait process worker2 exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/full_mode/source2.yaml"
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:20:06 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-task-2.yaml --remove-meta"
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8263 is alive
begin;
insert into shardddl1.tb2 values (101,101);
insert into shardddl1.tb2 values (102,102);
insert into shardddl1.tb2 values (103,103);
insert into shardddl1.tb2 values (104,104);
insert into shardddl1.tb2 values (105,105);
insert into shardddl1.tb2 values (106,106);
insert into shardddl1.tb2 values (107,107);
insert into shardddl1.tb2 values (108,108);
insert into shardddl1.tb2 values (109,109);
insert into shardddl1.tb2 values (110,110);
commit;
begin;
insert into shardddl1.tb1 values (111,111);
insert into shardddl1.tb1 values (112,112);
insert into shardddl1.tb1 values (113,113);
insert into shardddl1.tb1 values (114,114);
insert into shardddl1.tb1 values (115,115);
insert into shardddl1.tb1 values (116,116);
insert into shardddl1.tb1 values (117,117);
insert into shardddl1.tb1 values (118,118);
insert into shardddl1.tb1 values (119,119);
insert into shardddl1.tb1 values (120,120);
commit;
begin;
insert into shardddl1.tb2 values (121,121);
insert into shardddl1.tb2 values (122,122);
insert into shardddl1.tb2 values (123,123);
insert into shardddl1.tb2 values (124,124);
insert into shardddl1.tb2 values (125,125);
insert into shardddl1.tb2 values (126,126);
insert into shardddl1.tb2 values (127,127);
insert into shardddl1.tb2 values (128,128);
insert into shardddl1.tb2 values (129,129);
insert into shardddl1.tb2 values (130,130);
commit;
begin;
insert into shardddl1.t_1 values (131,131);
insert into shardddl1.t_1 values (132,132);
insert into shardddl1.t_1 values (133,133);
insert into shardddl1.t_1 values (134,134);
insert into shardddl1.t_1 values (135,135);
insert into shardddl1.t_1 values (136,136);
insert into shardddl1.t_1 values (137,137);
insert into shardddl1.t_1 values (138,138);
insert into shardddl1.t_1 values (139,139);
insert into shardddl1.t_1 values (140,140);
commit;
check diff successfully
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:08 CST 2024] <<<<<< test case shardddl1_1 success! >>>>>>
start running case: [shardddl2] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
begin;
insert into shardddl1.tb2 values (201,201);
insert into shardddl1.tb2 values (202,202);
insert into shardddl1.tb2 values (203,203);
insert into shardddl1.tb2 values (204,204);
insert into shardddl1.tb2 values (205,205);
insert into shardddl1.tb2 values (206,206);
insert into shardddl1.tb2 values (207,207);
insert into shardddl1.tb2 values (208,208);
insert into shardddl1.tb2 values (209,209);
insert into shardddl1.tb2 values (210,210);
commit;
begin;
insert into shardddl1.tb1 values (211,211);
insert into shardddl1.tb1 values (212,212);
insert into shardddl1.tb1 values (213,213);
insert into shardddl1.tb1 values (214,214);
insert into shardddl1.tb1 values (215,215);
insert into shardddl1.tb1 values (216,216);
insert into shardddl1.tb1 values (217,217);
insert into shardddl1.tb1 values (218,218);
insert into shardddl1.tb1 values (219,219);
insert into shardddl1.tb1 values (220,220);
commit;
begin;
insert into shardddl1.tb2 values (221,221);
insert into shardddl1.tb2 values (222,222);
insert into shardddl1.tb2 values (223,223);
insert into shardddl1.tb2 values (224,224);
insert into shardddl1.tb2 values (225,225);
insert into shardddl1.tb2 values (226,226);
insert into shardddl1.tb2 values (227,227);
insert into shardddl1.tb2 values (228,228);
insert into shardddl1.tb2 values (229,229);
insert into shardddl1.tb2 values (230,230);
commit;
begin;
insert into shardddl1.t_1 values (231,231);
insert into shardddl1.t_1 values (232,232);
insert into shardddl1.t_1 values (233,233);
insert into shardddl1.t_1 values (234,234);
insert into shardddl1.t_1 values (235,235);
insert into shardddl1.t_1 values (236,236);
insert into shardddl1.t_1 values (237,237);
insert into shardddl1.t_1 values (238,238);
insert into shardddl1.t_1 values (239,239);
insert into shardddl1.t_1 values (240,240);
commit;
check diff failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:08 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
process dm-master.test already exit
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:10 CST 2024] <<<<<< finish DM-RESYNC_TXN_INTERRUPT optimistic >>>>>>
[Thu May 16 15:20:10 CST 2024] <<<<<< start DM-STRICT_OPTIMISTIC_SINGLE_SOURCE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/single-source-strict-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:20:10 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl2/source1.yaml"
[Thu May 16 15:20:12 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
[Thu May 16 15:20:12 CST 2024] <<<<<< finish DM-STRICT_OPTIMISTIC_SINGLE_SOURCE optimistic >>>>>>
[Thu May 16 15:20:12 CST 2024] <<<<<< start DM-STRICT_OPTIMISTIC_DOUBLE_SOURCE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-strict-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl2/source2.yaml"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:13 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
[Thu May 16 15:20:14 CST 2024] <<<<<< start DM-DROP_COLUMN_EXEC_ERROR optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
got=1 expected=1
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:14 CST 2024] <<<<<< finish DM-STRICT_OPTIMISTIC_DOUBLE_SOURCE optimistic >>>>>>
[Thu May 16 15:20:14 CST 2024] <<<<<< start DM-131 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker 1
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:20:15 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
wait process dm-worker1 exit...
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:20:16 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Thu May 16 15:20:17 CST 2024] <<<<<< finish DM-131 optimistic >>>>>>
wait process dm-worker1 exit...
process dm-worker1 already exit
[Thu May 16 15:20:17 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/full_mode/source1.yaml"
[Thu May 16 15:20:18 CST 2024] <<<<<< start DM-132 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8262 is alive
check log contain failed 1-th time, retry later
dmctl test cmd: "operate-source create /tmp/dm_test/full_mode/source2.yaml"
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/full_mode/conf/dm-task.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=1 expected=1
restart dm-master
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check dump files have been cleaned
ls: cannot access /tmp/dm_test/full_mode/worker1/dumped_data.test: No such file or directory
worker1 auto removed dump files
ls: cannot access /tmp/dm_test/full_mode/worker2/dumped_data.test: No such file or directory
worker2 auto removed dump files
wait process dm-master exit...
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:22 CST 2024] <<<<<< finish DM-132 pessimistic >>>>>>
[Thu May 16 15:20:22 CST 2024] <<<<<< start DM-132 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:24 CST 2024] <<<<<< finish DM-132 optimistic >>>>>>
wait process dm-worker.test exit...
[Thu May 16 15:20:25 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Thu May 16 15:20:25 CST 2024] <<<<<< start DM-133 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:26 CST 2024] <<<<<< test case full_mode success! >>>>>>
start running case: [gbk] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:20:27 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:20:28 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
got=1 expected=1
got=1 expected=1
restart dm-worker 1
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-worker1 exit...
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:20:29 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:29 CST 2024] <<<<<< finish DM-133 pessimistic >>>>>>
[Thu May 16 15:20:29 CST 2024] <<<<<< start DM-133 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker1 exit...
process dm-worker1 already exit
[Thu May 16 15:20:30 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/gbk/source1.yaml"
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
check diff failed 1-th time, retry later
dmctl test cmd: "operate-source create /tmp/dm_test/gbk/source2.yaml"
[Thu May 16 15:20:31 CST 2024] <<<<<< finish DM-133 optimistic >>>>>>
[Thu May 16 15:20:32 CST 2024] <<<<<< start DM-134 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
prepare data
start task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-task.yaml --remove-meta"
check diff successfully
prepare incremental data
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:33 CST 2024] <<<<<< finish DM-DROP_COLUMN_EXEC_ERROR optimistic >>>>>>
[Thu May 16 15:20:33 CST 2024] <<<<<< start DM-INIT_SCHEMA optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
check incremental phase
check diff successfully
ERROR 1146 (42S02) at line 1: Table 'gbk.ddl1' doesn't exist
run tidb sql failed 1-th time, retry later
check diff failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
check log contain failed 1-th time, retry later
ERROR 1146 (42S02) at line 1: Table 'gbk.ddl2_copy' doesn't exist
run tidb sql failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:36 CST 2024] <<<<<< finish DM-134 pessimistic >>>>>>
[Thu May 16 15:20:36 CST 2024] <<<<<< start DM-134 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
restart dm-master
ERROR 1146 (42S02) at line 1: Table 'gbk.ddl3' doesn't exist
run tidb sql failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master exit...
[Thu May 16 15:20:38 CST 2024] <<<<<< finish DM-134 optimistic >>>>>>
wait process dm-master exit...
process dm-master already exit
[Thu May 16 15:20:39 CST 2024] <<<<<< start DM-135 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
ERROR 1049 (42000) at line 1: Unknown database 'gbk3'
run tidb sql failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:41 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
[Thu May 16 15:20:41 CST 2024] <<<<<< finish DM-135 pessimistic >>>>>>
[Thu May 16 15:20:41 CST 2024] <<<<<< start DM-135 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
ERROR 1146 (42S02) at line 1: Table 'gbk.ddl4' doesn't exist
run tidb sql failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Thu May 16 15:20:43 CST 2024] <<<<<< finish DM-135 optimistic >>>>>>
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
check diff successfully
dmctl test cmd: "stop-task test"
prepare data for invalid connection test
check log contain failed 1-th time, retry later
[Thu May 16 15:20:44 CST 2024] <<<<<< start DM-136 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
[Thu May 16 15:20:44 CST 2024] <<<<<< finish DM-INIT_SCHEMA optimistic >>>>>>
[Thu May 16 15:20:44 CST 2024] <<<<<< start DM-DROP_COLUMN_ALL_DONE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker 2
[Thu May 16 15:20:46 CST 2024] <<<<<< finish DM-136 optimistic >>>>>>
wait process dm-worker.test exit...
wait process dm-worker2 exit...
[Thu May 16 15:20:47 CST 2024] <<<<<< start DM-137 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker2 exit...
process dm-worker2 already exit
[Thu May 16 15:20:48 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:20:48 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:20:49 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
check log contain failed 1-th time, retry later
[Thu May 16 15:20:49 CST 2024] <<<<<< finish DM-137 optimistic >>>>>>
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status running
check count
check diff successfully
check test invalid connection with status running successfully
[Thu May 16 15:20:50 CST 2024] <<<<<< start DM-138 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=1 expected=1
restart dm-master
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
wait process dm-master exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:20:53 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:20:54 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:54 CST 2024] <<<<<< finish DM-138 pessimistic >>>>>>
[Thu May 16 15:20:54 CST 2024] <<<<<< start DM-138 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status queueing
[Thu May 16 15:20:55 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check count
check diff successfully
check test invalid connection with status queueing successfully
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:20:56 CST 2024] <<<<<< finish DM-138 optimistic >>>>>>
wait process dm-worker.test exit...
[Thu May 16 15:20:57 CST 2024] <<<<<< start DM-139 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:20:57 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
restart dm-worker 2
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:20:58 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
wait process dm-worker2 exit...
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status none
check count
check diff successfully
check test invalid connection with status none successfully
wait process dm-worker2 exit...
process dm-worker2 already exit
[Thu May 16 15:21:00 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:01 CST 2024] <<<<<< finish DM-139 pessimistic >>>>>>
[Thu May 16 15:21:01 CST 2024] <<<<<< start DM-139 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:03 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Thu May 16 15:21:03 CST 2024] <<<<<< finish DM-139 optimistic >>>>>>
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:04 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff failed 2-th time, retry later
[Thu May 16 15:21:04 CST 2024] <<<<<< start DM-142 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
start test inserting data after invalid connection
check count
check diff successfully
check test inserting data after invalid connection successfully
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:06 CST 2024] <<<<<< finish DM-DROP_COLUMN_ALL_DONE optimistic >>>>>>
[Thu May 16 15:21:06 CST 2024] <<<<<< start DM-RECOVER_LOCK optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:07 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check log contain failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:08 CST 2024] <<<<<< finish DM-142 pessimistic >>>>>>
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:08 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Thu May 16 15:21:09 CST 2024] <<<<<< start DM-143 pessimistic >>>>>>
rpc addr 127.0.0.1:8263 is alive
start test adding UNIQUE on column with duplicate data
check cancelled error
dmctl test cmd: "query-status gbk"
got=0 expected=1
command: query-status gbk origin SQL: \[ALTER TABLE gbk.invalid_conn_test1 ADD UNIQUE(i)\]: DDL ALTER TABLE `gbk`.`invalid_conn_test1` ADD UNIQUE(`i`) executed in background and met error count: 0 != expected: 1, failed the 0-th time, will retry again
check log contain failed 1-th time, retry later
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
got=1 expected=1
dmctl test cmd: "resume-task gbk"
got=3 expected=3
check test adding UNIQUE on column with duplicate data successfully
restart dm-master
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
wait process dm-master exit...
[Thu May 16 15:21:13 CST 2024] <<<<<< finish DM-143 pessimistic >>>>>>
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:14 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master exit...
process dm-master already exit
[Thu May 16 15:21:14 CST 2024] <<<<<< start DM-145 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:16 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Thu May 16 15:21:16 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status running (multi-schema change)
check count 1
run tidb sql failed 1-th time, retry later
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:18 CST 2024] <<<<<< finish DM-145 pessimistic >>>>>>
[Thu May 16 15:21:18 CST 2024] <<<<<< start DM-145 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8261 is alive
restart dm-master
check count 2
check diff successfully
check test invalid connection with status running (multi-schema change) successfully
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:20 CST 2024] <<<<<< finish DM-145 optimistic >>>>>>
wait process dm-worker.test exit...
wait process dm-master exit...
process dm-master already exit
[Thu May 16 15:21:21 CST 2024] <<<<<< start DM-146 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:21 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:22 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:22 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Thu May 16 15:21:23 CST 2024] <<<<<< finish DM-146 pessimistic >>>>>>
[Thu May 16 15:21:23 CST 2024] <<<<<< start DM-146 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status queueing (multi-schema change)
check count 1
run tidb sql failed 1-th time, retry later
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:25 CST 2024] <<<<<< finish DM-146 optimistic >>>>>>
rpc addr 127.0.0.1:8261 is alive
check diff failed 1-th time, retry later
check count 2
check diff successfully
check test invalid connection with status queueing (multi-schema change) successfully
[Thu May 16 15:21:26 CST 2024] <<<<<< start DM-147 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "shard-ddl-lock"
dmctl test cmd: "query-status test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "stop-task test"
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 /tmp/dm_test/shardddl4_1/schema.sql -s mysql-replica-01"
[Thu May 16 15:21:27 CST 2024] <<<<<< finish DM-RECOVER_LOCK optimistic >>>>>>
run DM_DropAddColumn case #0
[Thu May 16 15:21:27 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:27 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "binlog replace test "alter table shardddl1.tb1 drop column b""
got=2 expected=2
got=1 expected=1
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:28 CST 2024] <<<<<< finish DM-147 optimistic >>>>>>
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:28 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
[Thu May 16 15:21:29 CST 2024] <<<<<< start DM-148 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
start test invalid connection with status none (multi-schema change)
check count 1
check count 2
check diff successfully
check test invalid connection with status none (multi-schema change) successfully
dmctl test cmd: "query-status test"
wait process dm-worker.test exit...
got=2 expected=2
check diff failed 1-th time, retry later
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:33 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:33 CST 2024] <<<<<< finish DM-148 pessimistic >>>>>>
[Thu May 16 15:21:33 CST 2024] <<<<<< start DM-148 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
use sync_diff_inspector to check increment data
check diff successfully
check diff successfully
data checked after one worker was killed
try to kill worker port 8263
wait process dm-worker2 exit...
process dm-worker2 already exit
worker2 was killed
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test2"
got=2 expected=2
[Thu May 16 15:21:33 CST 2024] <<<<<< finish test_multi_task_reduce_and_restart_worker >>>>>>
3 dm-master alive
3 dm-worker alive
0 dm-syncer alive
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:34 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-master.test exit...
check diff failed 2-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8263 is alive
start test inserting data after invalid connection (multi-schema change)
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
check count 1
run tidb sql failed 1-th time, retry later
[Thu May 16 15:21:35 CST 2024] <<<<<< finish DM-148 optimistic >>>>>>
wait process dm-master.test exit...
[Thu May 16 15:21:36 CST 2024] <<<<<< start DM-149 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
check diff failed 3-th time, retry later
check count 2
wait process dm-master.test exit...
check diff successfully
check test inserting data after invalid connection (multi-schema change) successfully
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-worker.test exit...
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:39 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #1
[Thu May 16 15:21:39 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Thu May 16 15:21:39 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:40 CST 2024] <<<<<< finish DM-149 pessimistic >>>>>>
[Thu May 16 15:21:40 CST 2024] <<<<<< start DM-149 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8262 is alive
[Thu May 16 15:21:40 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gbk/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff successfully
restart dm-master
wait process dm-master.test exit...
rpc addr 127.0.0.1:8263 is alive
start test adding UNIQUE on column with duplicate data (multi-schema change)
check cancelled error
dmctl test cmd: "query-status gbk"
got=0 expected=1
command: query-status gbk origin SQL: \[ALTER TABLE gbk.invalid_conn_test1 ADD UNIQUE(k), ADD UNIQUE(m)\]: DDL ALTER TABLE `gbk`.`invalid_conn_test1` ADD UNIQUE(`k`) executed in background and met error count: 0 != expected: 1, failed the 0-th time, will retry again
wait process dm-master exit...
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:21:42 CST 2024] <<<<<< test case ha_cases2 success! >>>>>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-release-7.5-pull_dm_integration_test-366/tiflow-dm already exists)
check diff successfully
dmctl test cmd: "stop-task test"
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Thu May 16 15:21:42 CST 2024] <<<<<< finish DM-149 optimistic >>>>>>
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
wait process dm-master exit...
process dm-master already exit
[Thu May 16 15:21:43 CST 2024] <<<<<< start DM-150 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
got=1 expected=1
check test adding UNIQUE on column with duplicate data (multi-schema change) successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
[Thu May 16 15:21:45 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "show-ddl-locks"
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
process dm-master.test already exit
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8261 is alive
check log contain failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:48 CST 2024] <<<<<< finish DM-150 pessimistic >>>>>>
[Thu May 16 15:21:48 CST 2024] <<<<<< start DM-150 optimistic >>>>>>
wait process dm-worker.test exit...
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:21:48 CST 2024] <<<<<< test case gbk success! >>>>>>
start running case: [gtid] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:21:49 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=3 expected=3
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
[Thu May 16 15:21:50 CST 2024] <<<<<< finish DM-150 optimistic >>>>>>
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:21:51 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Thu May 16 15:21:51 CST 2024] <<<<<< start DM-151 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/gtid/source1.yaml"
[Thu May 16 15:21:52 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff failed 2-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "show-ddl-locks"
got=1 expected=1
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/gtid/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-task.yaml --remove-meta"
check diff failed 3-th time, retry later
check diff successfully
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:57 CST 2024] <<<<<< finish DM-151 pessimistic >>>>>>
[Thu May 16 15:21:57 CST 2024] <<<<<< start DM-151 optimistic >>>>>>
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:21:57 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #2
[Thu May 16 15:21:57 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=3 expected=3
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
restart dm-master
wait process dm-worker.test exit...
wait process dm-master exit...
wait process dm-worker.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:22:01 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:22:02 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Thu May 16 15:22:03 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/gtid/source1.yaml"
[Thu May 16 15:22:04 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Thu May 16 15:22:04 CST 2024] <<<<<< finish DM-151 optimistic >>>>>>
[Thu May 16 15:22:05 CST 2024] <<<<<< start DM-152 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/gtid/source2.yaml"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-task.yaml --remove-meta"
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
dmctl test cmd: "query-status test"
got=2 expected=2
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
check diff failed 1-th time, retry later
check diff successfully
new_gtid1 2dbf9433-1353-11ef-89bc-cab4318fccf0:6 new_gtid2 2e35b602-1353-11ef-b6f1-cab4318fccf0:6
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:22:08 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker1.toml >>>>>>
[Thu May 16 15:22:08 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff failed 2-th time, retry later
rpc addr 127.0.0.1:8262 is alive
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-task.yaml"
check diff successfully
check diff failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
check diff successfully
check diff failed 1-th time, retry later
check diff failed 3-th time, retry later
[Thu May 16 15:22:12 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker1.toml >>>>>>
[Thu May 16 15:22:12 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8262 is alive
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/gtid/conf/dm-task.yaml"
check diff successfully
[Thu May 16 15:22:13 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #3
[Thu May 16 15:22:13 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
check diff successfully
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
wait process dm-worker.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
restart dm-master
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:22:20 CST 2024] <<<<<< finish DM-152 optimistic >>>>>>
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:22:20 CST 2024] <<<<<< test case gtid success! >>>>>>
start running case: [ha_cases] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/run.sh...
Verbose mode = false
wait process dm-master exit...
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Thu May 16 15:22:20 CST 2024] <<<<<< start test_exclusive_relay >>>>>>
start DM worker and master cluster
[Thu May 16 15:22:20 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-standalone.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Thu May 16 15:22:21 CST 2024] <<<<<< start DM-153 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:22:21 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source1.yaml"
[Thu May 16 15:22:22 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 -s mysql-replica-01 --from-target"
dmctl test cmd: "binlog replace test "alter table shardddl1.tb1 drop column b""
got=2 expected=2
got=1 expected=1
check diff successfully
dmctl test cmd: "stop-task test"
[Thu May 16 15:22:23 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-relay -s mysql-replica-01 worker1 worker2"
[Thu May 16 15:22:23 CST 2024] <<<<<< finish DM-153 optimistic >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Thu May 16 15:22:24 CST 2024] <<<<<< start DM-154 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
got=3 expected=3
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source2.yaml"
dmctl test cmd: "list-member --worker"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "operate-source show -s mysql-replica-02"
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 -s mysql-replica-01 --from-source"
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
[Thu May 16 15:22:27 CST 2024] <<<<<< finish DM-154 optimistic >>>>>>
wait process dm-master.test exit...
process dm-master.test already exit
check diff failed 2-th time, retry later
[Thu May 16 15:22:28 CST 2024] <<<<<< start DM-155 optimistic >>>>>>
wait process dm-worker.test exit...
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
check diff failed 3-th time, retry later
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
dmctl test cmd: "query-status test"
got=2 expected=2
restart worker2
restart dm-worker2
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
wait process worker2 exit...
[Thu May 16 15:22:32 CST 2024] <<<<<< finish test_exclusive_relay >>>>>>
[Thu May 16 15:22:32 CST 2024] <<<<<< start test_exclusive_relay_2 >>>>>>
start DM worker and master cluster
[Thu May 16 15:22:32 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-standalone.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Thu May 16 15:22:34 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #4
[Thu May 16 15:22:34 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:22:33 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
rpc addr 127.0.0.1:8261 is alive
[Thu May 16 15:22:35 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source1.yaml"
[Thu May 16 15:22:36 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source2.yaml"
dmctl test cmd: "shard-ddl-lock"
restart worker2
restart dm-worker2
got=1 expected=1
restart dm-master
wait process worker2 exit...
dmctl test cmd: "start-relay -s mysql-replica-01 worker1"
wait process dm-master exit...
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:22:39 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
got=2 expected=2
dmctl test cmd: "start-relay -s mysql-replica-02 worker2"
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8263 is alive
got=2 expected=2
[Thu May 16 15:22:40 CST 2024] <<<<<< START DM-WORKER on port 8264, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker3.toml >>>>>>
wait for rpc addr 127.0.0.1:8264 alive the 1-th time
restart worker2
restart dm-worker2
[Thu May 16 15:22:41 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8264 is alive
kill dm-worker1
wait process worker2 exit...
wait process dm-worker1 exit...
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:22:43 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker1 exit...
process dm-worker1 already exit
dmctl test cmd: "list-member --name worker3"
got=1 expected=1
[Thu May 16 15:22:44 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "list-member --name worker3"
got=1 expected=1
dmctl test cmd: "list-member --name worker1"
got=1 expected=1
kill dm-worker2
restart worker2
restart dm-worker2
wait process dm-worker2 exit...
wait process worker2 exit...
check diff failed 2-th time, retry later
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:22:47 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "operate-source show -s mysql-replica-02"
got=1 expected=1
[Thu May 16 15:22:47 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "list-member --name worker2"
got=1 expected=1
1 dm-master alive
3 dm-worker alive
0 dm-syncer alive
rpc addr 127.0.0.1:8263 is alive
check diff failed 3-th time, retry later
wait process dm-master.test exit...
restart worker1
restart dm-worker1
wait process dm-master.test exit...
process dm-master.test already exit
wait process worker1 exit...
wait process worker1 exit...
process worker1 already exit
[Thu May 16 15:22:51 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
[Thu May 16 15:22:52 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #5
[Thu May 16 15:22:52 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
restart worker2
restart dm-worker2
dmctl test cmd: "shard-ddl-lock"
wait process worker2 exit...
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
restart dm-master
[Thu May 16 15:22:55 CST 2024] <<<<<< finish test_exclusive_relay_2 >>>>>>
[Thu May 16 15:22:55 CST 2024] <<<<<< start test_last_bound >>>>>>
[Thu May 16 15:22:55 CST 2024] <<<<<< start test_running >>>>>>
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:22:56 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-master exit...
rpc addr 127.0.0.1:8263 is alive
wait process dm-master exit...
process dm-master already exit
import prepare data
start DM worker and master cluster
[Thu May 16 15:22:57 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master1.toml >>>>>>
[Thu May 16 15:22:57 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master2.toml >>>>>>
[Thu May 16 15:22:57 CST 2024] <<<<<< START DM-MASTER on port 8461, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master3.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
restart worker2
restart dm-worker2
[Thu May 16 15:23:00 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process worker2 exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:23:00 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
rpc addr 127.0.0.1:8361 is alive
rpc addr 127.0.0.1:8461 is alive
start worker and operate mysql config to worker
[Thu May 16 15:23:01 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source1.yaml"
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
restart worker2
restart dm-worker2
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 0-th time, will retry again
[Thu May 16 15:23:04 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process worker2 exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source2.yaml"
wait process worker2 exit...
process worker2 already exit
[Thu May 16 15:23:05 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 1-th time, will retry again
rpc addr 127.0.0.1:8263 is alive
check log contain failed 1-th time, retry later
start DM task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-task.yaml "
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 2-th time, will retry again
dmctl test cmd: "query-status test"
got=2 expected=2
got=2 expected=2
use sync_diff_inspector to check full dump loader
check diff successfully
flush logs to force rotate binlog file
apply increment data before restart dm-worker to ensure entering increment phase
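(A minimal sketch of what these two steps amount to upstream, outside the harness's own helpers; the host/port and the table name are assumptions, not the test's real config:

    # rotate to a fresh binlog file, then write rows so the task has increment data to sync
    mysql -h127.0.0.1 -P3306 -uroot -e 'FLUSH LOGS;'
    # ha_test.t1 is a placeholder table name
    mysql -h127.0.0.1 -P3306 -uroot -e "INSERT INTO ha_test.t1 VALUES (100, 'x');"
)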
check log contain failed 1-th time, retry later
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 3-th time, will retry again
check log contain failed 1-th time, retry later
use sync_diff_inspector to check increment data
check diff successfully
[Thu May 16 15:23:11 CST 2024] <<<<<< finish test_running >>>>>>
worker1 bound "mysql-replica-01"
worker2 bound "mysql-replica-02"
dmctl test cmd: "start-relay -s mysql-replica-01 worker1"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 4-th time, will retry again
restart worker1
restart dm-worker1
got=2 expected=2
dmctl test cmd: "start-relay -s mysql-replica-02 worker2"
wait process worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 5-th time, will retry again
got=2 expected=2
dmctl test cmd: "query-status test"
got=4 expected=4
kill dm-worker1
wait process worker1 exit...
process worker1 already exit
[Thu May 16 15:23:14 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-worker1 exit...
rpc addr 127.0.0.1:8262 is alive
check log contain failed 1-th time, retry later
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-worker1 exit...
process dm-worker1 already exit
kill dm-worker2
wait process dm-worker2 exit...
check log contain failed 1-th time, retry later
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 7-th time, will retry again
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "list-member --name worker1 --name worker2"
got=2 expected=2
start worker1
[Thu May 16 15:23:18 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
start worker2
[Thu May 16 15:23:19 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check log contain failed 1-th time, retry later
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 8-th time, will retry again
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "list-member --name worker1 --name worker2"
got=1 expected=1
got=1 expected=1
kill dm-worker1
restart master
restart dm-master
wait process dm-worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 9-th time, will retry again
wait process dm-master exit...
wait process dm-worker1 exit...
process dm-worker1 already exit
kill dm-worker2
wait process dm-master exit...
process dm-master already exit
{
    "result": true,
    "msg": "",
    "sources": [
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-01",
                "worker": "worker1",
                "result": null,
                "relayStatus": null
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Paused",
                    "unit": "Sync",
                    "result": {
                        "isCanceled": false,
                        "errors": [
                            {
                                "ErrCode": 42501,
                                "ErrClass": "ha",
                                "ErrScope": "internal",
                                "ErrLevel": "high",
                                "Message": "startLocation: [position: (dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 42228), gtid-set: b72280d8-1353-11ef-899e-425433a60b0f:1-194], endLocation: [position: (dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 42353), gtid-set: b72280d8-1353-11ef-899e-425433a60b0f:1-195], origin SQL: [alter table shardddl1.tb1 add column b int after a]: fail to do etcd txn operation: txn commit failed",
                                "RawCause": "rpc error: code = Unavailable desc = error reading from server: EOF",
                                "Workaround": "Please check dm-master's node status and the network between this node and dm-master"
                            }
                        ],
                        "detail": null
                    },
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "12",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 42353)",
                        "masterBinlogGtid": "b72280d8-1353-11ef-899e-425433a60b0f:1-195",
                        "syncerBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 42163)",
                        "syncerBinlogGtid": "b72280d8-1353-11ef-899e-425433a60b0f:1-194",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "remote",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "12",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        },
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-02",
                "worker": "worker2",
                "result": null,
                "relayStatus": {
                    "masterBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 39206)",
                    "masterBinlogGtid": "b798cda5-1353-11ef-a9d8-425433a60b0f:1-167",
                    "relaySubDir": "b798cda5-1353-11ef-a9d8-425433a60b0f.000001",
                    "relayBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 39206)",
                    "relayBinlogGtid": "b798cda5-1353-11ef-a9d8-425433a60b0f:1-167",
                    "relayCatchUpMaster": true,
                    "stage": "Running",
                    "result": null
                }
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Running",
                    "unit": "Sync",
                    "result": null,
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "6",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin.000001, 39206)",
                        "masterBinlogGtid": "b798cda5-1353-11ef-a9d8-425433a60b0f:1-167",
                        "syncerBinlog": "(dm-it-f4ba626f-d795-4761-8403-9a01c89a9d2a-2hq7z-xlzm3-bin|000001.000001, 38926)",
                        "syncerBinlogGtid": "b798cda5-1353-11ef-a9d8-425433a60b0f:1-166",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "local",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "6",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        }
    ]
}
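(The dump above shows one subtask Paused with ErrCode 42501 — an etcd txn that failed while dm-master was restarting — and the other still Running. A hedged sketch for summarizing such a query-status response; jq is an assumption, and status.json is a hypothetical capture of the JSON above:

    # print each subtask's name, stage, and first error code (or "none")
    jq -r '.sources[].subTaskStatus[] | [.name, .stage, ((.result.errors[0].ErrCode // "none") | tostring)] | @tsv' status.json
)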
PASS
coverage: 4.0% of statements in github.com/pingcap/tiflow/dm/...
curl: (7) Failed connect to 127.0.0.1:8361; Connection refused
curl: (7) Failed connect to 127.0.0.1:8461; Connection refused
curl: (7) Failed connect to 127.0.0.1:8561; Connection refused
curl: (7) Failed connect to 127.0.0.1:8661; Connection refused
curl: (7) Failed connect to 127.0.0.1:8761; Connection refused
curl: (7) Failed connect to 127.0.0.1:8264; Connection refused
curl: (7) Failed connect to 127.0.0.1:18262; Connection refused
curl: (7) Failed connect to 127.0.0.1:18263; Connection refused
make: *** [dm_integration_test_in_group] Error 1
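(The connection-refused lines appear to come from post-test probes against master/worker ports that are no longer, or never were, listening; the make error then propagates the earlier test failure. A hedged sketch of such a probe, assuming the component exposes an HTTP /status endpoint:

    # probe a port; curl exits non-zero (e.g. code 7) when nothing is listening
    curl -sf "http://127.0.0.1:8261/status" || echo "127.0.0.1:8261 not reachable"
)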
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
wait process dm-worker2 exit...
[Pipeline] }
[Pipeline] // dir
Post stage
[Pipeline] sh
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "list-member --name worker1 --name worker2"
got=2 expected=2
start worker2
[Thu May 16 15:23:25 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Thu May 16 15:23:25 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
start worker1
[Thu May 16 15:23:26 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
+ ls /tmp/dm_test
cov.shardddl1.dmctl.1715843605.687.out
cov.shardddl1.dmctl.1715843607.820.out
cov.shardddl1.dmctl.1715843611.1107.out
cov.shardddl1.dmctl.1715843612.1151.out
cov.shardddl1.dmctl.1715843635.4305.out
cov.shardddl1.dmctl.1715843638.4638.out
cov.shardddl1.dmctl.1715843640.4685.out
cov.shardddl1.dmctl.1715843672.5196.out
cov.shardddl1.dmctl.1715843675.5527.out
cov.shardddl1.dmctl.1715843677.5568.out
cov.shardddl1.dmctl.1715843694.6189.out
cov.shardddl1.dmctl.1715843699.6516.out
cov.shardddl1.dmctl.1715843700.6562.out
cov.shardddl1.dmctl.1715843700.6655.out
cov.shardddl1.dmctl.1715843701.6800.out
cov.shardddl1.dmctl.1715843702.6843.out
cov.shardddl1.dmctl.1715843704.6957.out
cov.shardddl1.dmctl.1715843704.7111.out
cov.shardddl1.dmctl.1715843706.7165.out
cov.shardddl1.dmctl.1715843706.7288.out
cov.shardddl1.dmctl.1715843706.7329.out
cov.shardddl1.dmctl.1715843706.7372.out
cov.shardddl1.dmctl.1715843707.7415.out
cov.shardddl1.dmctl.1715843708.7471.out
cov.shardddl1.dmctl.1715843716.7677.out
cov.shardddl1.dmctl.1715843719.7752.out
cov.shardddl1.dmctl.1715843719.7791.out
cov.shardddl1.dmctl.1715843719.7828.out
cov.shardddl1.dmctl.1715843719.7980.out
cov.shardddl1.dmctl.1715843721.8037.out
cov.shardddl1.dmctl.1715843723.8226.out
cov.shardddl1.dmctl.1715843723.8376.out
cov.shardddl1.dmctl.1715843725.8419.out
cov.shardddl1.dmctl.1715843725.8546.out
cov.shardddl1.dmctl.1715843725.8586.out
cov.shardddl1.dmctl.1715843726.8736.out
cov.shardddl1.dmctl.1715843728.8779.out
cov.shardddl1.dmctl.1715843728.8890.out
cov.shardddl1.dmctl.1715843731.9027.out
cov.shardddl1.dmctl.1715843731.9068.out
cov.shardddl1.dmctl.1715843731.9106.out
cov.shardddl1.dmctl.1715843736.9382.out
cov.shardddl1.dmctl.1715843737.9434.out
cov.shardddl1.dmctl.1715843738.9490.out
cov.shardddl1.dmctl.1715843742.9640.out
cov.shardddl1.dmctl.1715843743.9790.out
cov.shardddl1.dmctl.1715843744.9831.out
cov.shardddl1.dmctl.1715843748.9974.out
cov.shardddl1.dmctl.1715843784.10280.out
cov.shardddl1.dmctl.1715843784.10327.out
cov.shardddl1.dmctl.1715843785.10370.out
cov.shardddl1.dmctl.1715843787.10463.out
cov.shardddl1.dmctl.1715843788.10504.out
cov.shardddl1.dmctl.1715843794.10685.out
cov.shardddl1.dmctl.1715843794.10727.out
cov.shardddl1.dmctl.1715843795.10768.out
cov.shardddl1.dmctl.1715843795.10913.out
cov.shardddl1.dmctl.1715843796.10959.out
cov.shardddl1.dmctl.1715843799.11064.out
cov.shardddl1.dmctl.1715843799.11107.out
cov.shardddl1.dmctl.1715843805.11293.out
cov.shardddl1.dmctl.1715843805.11338.out
cov.shardddl1.dmctl.1715843805.11387.out
cov.shardddl1.dmctl.1715843806.11538.out
cov.shardddl1.dmctl.1715843807.11595.out
cov.shardddl1.dmctl.1715843816.12106.out
cov.shardddl1.dmctl.1715843817.12256.out
cov.shardddl1.dmctl.1715843818.12297.out
cov.shardddl1.dmctl.1715843823.12737.out
cov.shardddl1.dmctl.1715843824.12890.out
cov.shardddl1.dmctl.1715843825.12939.out
cov.shardddl1.dmctl.1715843828.13239.out
cov.shardddl1.dmctl.1715843828.13393.out
cov.shardddl1.dmctl.1715843829.13431.out
cov.shardddl1.dmctl.1715843830.13704.out
cov.shardddl1.dmctl.1715843833.14031.out
cov.shardddl1.dmctl.1715843835.14078.out
cov.shardddl1.dmctl.1715843837.14142.out
cov.shardddl1.dmctl.1715843837.14186.out
cov.shardddl1.dmctl.1715843840.14520.out
cov.shardddl1.dmctl.1715843842.14565.out
cov.shardddl1.dmctl.1715843842.14698.out
cov.shardddl1.dmctl.1715843846.15033.out
cov.shardddl1.dmctl.1715843847.15070.out
cov.shardddl1.dmctl.1715843847.15117.out
cov.shardddl1.dmctl.1715843848.15161.out
cov.shardddl1.dmctl.1715843849.15311.out
cov.shardddl1.dmctl.1715843850.15348.out
cov.shardddl1.dmctl.1715843850.15432.out
cov.shardddl1.dmctl.1715843852.15583.out
cov.shardddl1.dmctl.1715843853.15622.out
cov.shardddl1.dmctl.1715843860.15829.out
cov.shardddl1.dmctl.1715843862.15979.out
cov.shardddl1.dmctl.1715843863.16017.out
cov.shardddl1.dmctl.1715843865.16121.out
cov.shardddl1.dmctl.1715843867.16266.out
cov.shardddl1.dmctl.1715843868.16306.out
cov.shardddl1.dmctl.1715843868.16397.out
cov.shardddl1.master.out
cov.shardddl1.worker.8262.1715843604.out
cov.shardddl1.worker.8262.1715843610.out
cov.shardddl1.worker.8262.1715843637.out
cov.shardddl1.worker.8262.1715843674.out
cov.shardddl1.worker.8262.1715843697.out
cov.shardddl1.worker.8262.1715843832.out
cov.shardddl1.worker.8262.1715843839.out
cov.shardddl1.worker.8262.1715843844.out
cov.shardddl1.worker.8263.1715843606.out
cov.shardddl1.worker.8263.1715843610.out
cov.shardddl1.worker.8263.1715843637.out
cov.shardddl1.worker.8263.1715843674.out
cov.shardddl1.worker.8263.1715843698.out
cov.shardddl1.worker.8263.1715843832.out
cov.shardddl1.worker.8263.1715843839.out
cov.shardddl1.worker.8263.1715843844.out
cov.shardddl1_1.dmctl.1715843878.16852.out
cov.shardddl1_1.dmctl.1715843879.16983.out
cov.shardddl1_1.dmctl.1715843880.17074.out
cov.shardddl1_1.dmctl.1715843881.17125.out
cov.shardddl1_1.dmctl.1715843884.17473.out
cov.shardddl1_1.dmctl.1715843885.17626.out
cov.shardddl1_1.dmctl.1715843886.17679.out
cov.shardddl1_1.dmctl.1715843888.17798.out
cov.shardddl1_1.dmctl.1715843890.17946.out
cov.shardddl1_1.dmctl.1715843891.17984.out
cov.shardddl1_1.dmctl.1715843893.18102.out
cov.shardddl1_1.dmctl.1715843894.18254.out
cov.shardddl1_1.dmctl.1715843896.18306.out
cov.shardddl1_1.dmctl.1715843898.18431.out
cov.shardddl1_1.dmctl.1715843899.18580.out
cov.shardddl1_1.dmctl.1715843901.18623.out
cov.shardddl1_1.dmctl.1715843903.18742.out
cov.shardddl1_1.dmctl.1715843904.18890.out
cov.shardddl1_1.dmctl.1715843906.18926.out
cov.shardddl1_1.dmctl.1715843908.19066.out
cov.shardddl1_1.dmctl.1715843909.19212.out
cov.shardddl1_1.dmctl.1715843911.19260.out
cov.shardddl1_1.dmctl.1715843911.19340.out
cov.shardddl1_1.dmctl.1715843912.19482.out
cov.shardddl1_1.dmctl.1715843914.19527.out
cov.shardddl1_1.dmctl.1715843914.19605.out
cov.shardddl1_1.dmctl.1715843915.19749.out
cov.shardddl1_1.dmctl.1715843916.19786.out
cov.shardddl1_1.dmctl.1715843917.19901.out
cov.shardddl1_1.dmctl.1715843918.20045.out
cov.shardddl1_1.dmctl.1715843919.20086.out
cov.shardddl1_1.dmctl.1715843922.20174.out
cov.shardddl1_1.dmctl.1715843923.20318.out
cov.shardddl1_1.dmctl.1715843924.20362.out
cov.shardddl1_1.dmctl.1715843926.20422.out
cov.shardddl1_1.dmctl.1715843928.20567.out
cov.shardddl1_1.dmctl.1715843929.20607.out
cov.shardddl1_1.dmctl.1715843931.20668.out
cov.shardddl1_1.dmctl.1715843932.20811.out
cov.shardddl1_1.dmctl.1715843934.20851.out
cov.shardddl1_1.dmctl.1715843936.20913.out
cov.shardddl1_1.dmctl.1715843937.21055.out
cov.shardddl1_1.dmctl.1715843938.21091.out
cov.shardddl1_1.dmctl.1715843940.21154.out
cov.shardddl1_1.dmctl.1715843942.21299.out
cov.shardddl1_1.dmctl.1715843943.21340.out
cov.shardddl1_1.dmctl.1715843943.21385.out
cov.shardddl1_1.dmctl.1715843944.21527.out
cov.shardddl1_1.dmctl.1715843945.21564.out
cov.shardddl1_1.dmctl.1715843945.21612.out
cov.shardddl1_1.dmctl.1715843947.21758.out
cov.shardddl1_1.dmctl.1715843948.21791.out
cov.shardddl1_1.dmctl.1715843950.21856.out
cov.shardddl1_1.dmctl.1715843952.22004.out
cov.shardddl1_1.dmctl.1715843953.22042.out
cov.shardddl1_1.dmctl.1715843955.22099.out
cov.shardddl1_1.dmctl.1715843956.22241.out
cov.shardddl1_1.dmctl.1715843958.22282.out
cov.shardddl1_1.dmctl.1715843960.22342.out
cov.shardddl1_1.dmctl.1715843961.22492.out
cov.shardddl1_1.dmctl.1715843962.22535.out
cov.shardddl1_1.dmctl.1715843963.22580.out
cov.shardddl1_1.dmctl.1715843963.22619.out
cov.shardddl1_1.dmctl.1715843964.22766.out
cov.shardddl1_1.dmctl.1715843965.22807.out
cov.shardddl1_1.dmctl.1715843966.22908.out
cov.shardddl1_1.dmctl.1715843967.23058.out
cov.shardddl1_1.dmctl.1715843968.23094.out
cov.shardddl1_1.dmctl.1715843968.23162.out
cov.shardddl1_1.dmctl.1715843968.23202.out
cov.shardddl1_1.dmctl.1715843970.23352.out
cov.shardddl1_1.dmctl.1715843971.23388.out
cov.shardddl1_1.dmctl.1715843971.23433.out
cov.shardddl1_1.dmctl.1715843971.23476.out
cov.shardddl1_1.dmctl.1715843973.23626.out
cov.shardddl1_1.dmctl.1715843974.23662.out
cov.shardddl1_1.dmctl.1715843977.23806.out
cov.shardddl1_1.dmctl.1715843977.23951.out
cov.shardddl1_1.dmctl.1715843979.23991.out
cov.shardddl1_1.dmctl.1715843979.24091.out
cov.shardddl1_1.dmctl.1715843980.24242.out
cov.shardddl1_1.dmctl.1715843981.24280.out
cov.shardddl1_1.dmctl.1715843982.24348.out
cov.shardddl1_1.dmctl.1715843982.24491.out
cov.shardddl1_1.dmctl.1715843983.24534.out
cov.shardddl1_1.dmctl.1715843983.24603.out
cov.shardddl1_1.dmctl.1715843984.24647.out
cov.shardddl1_1.dmctl.1715843985.24798.out
cov.shardddl1_1.dmctl.1715843986.24840.out
cov.shardddl1_1.dmctl.1715843987.24972.out
cov.shardddl1_1.dmctl.1715843988.25124.out
cov.shardddl1_1.dmctl.1715843989.25164.out
cov.shardddl1_1.dmctl.1715843992.25306.out
cov.shardddl1_1.dmctl.1715843993.25462.out
cov.shardddl1_1.dmctl.1715843994.25502.out
cov.shardddl1_1.dmctl.1715843995.25570.out
cov.shardddl1_1.dmctl.1715843995.25608.out
cov.shardddl1_1.dmctl.1715843995.25761.out
cov.shardddl1_1.dmctl.1715843996.25800.out
cov.shardddl1_1.dmctl.1715843996.25875.out
cov.shardddl1_1.dmctl.1715843997.25914.out
cov.shardddl1_1.dmctl.1715843998.26070.out
cov.shardddl1_1.dmctl.1715843999.26107.out
cov.shardddl1_1.dmctl.1715844002.26222.out
cov.shardddl1_1.master.out
cov.shardddl1_1.worker.8262.1715843876.out
cov.shardddl1_1.worker.8263.1715843878.out
cov.shardddl2.dmctl.1715844011.26711.out
cov.shardddl2.dmctl.1715844013.26845.out
cov.shardddl2.dmctl.1715844014.26940.out
cov.shardddl2.dmctl.1715844015.26988.out
cov.shardddl2.dmctl.1715844020.27193.out
cov.shardddl2.dmctl.1715844028.27401.out
cov.shardddl2.dmctl.1715844031.27580.out
cov.shardddl2.dmctl.1715844033.27691.out
cov.shardddl2.dmctl.1715844033.27843.out
cov.shardddl2.dmctl.1715844035.27913.out
cov.shardddl2.dmctl.1715844044.28190.out
cov.shardddl2.dmctl.1715844044.28274.out
cov.shardddl2.dmctl.1715844044.28416.out
cov.shardddl2.dmctl.1715844046.28464.out
cov.shardddl2.dmctl.1715844051.28661.out
cov.shardddl2.dmctl.1715844058.28850.out
cov.shardddl2.dmctl.1715844062.29022.out
cov.shardddl2.dmctl.1715844066.29156.out
cov.shardddl2.dmctl.1715844066.29304.out
cov.shardddl2.dmctl.1715844067.29347.out
cov.shardddl2.dmctl.1715844087.29874.out
cov.shardddl2.dmctl.1715844087.29918.out
cov.shardddl2.dmctl.1715844087.30069.out
cov.shardddl2.dmctl.1715844089.30136.out
cov.shardddl2.dmctl.1715844091.30251.out
cov.shardddl2.dmctl.1715844091.30309.out
cov.shardddl2.dmctl.1715844091.30359.out
cov.shardddl2.dmctl.1715844098.30517.out
cov.shardddl2.dmctl.1715844098.30564.out
cov.shardddl2.dmctl.1715844098.30608.out
cov.shardddl2.dmctl.1715844099.30705.out
cov.shardddl2.dmctl.1715844099.30852.out
cov.shardddl2.dmctl.1715844100.30902.out
cov.shardddl2.dmctl.1715844110.31172.out
cov.shardddl2.dmctl.1715844110.31227.out
cov.shardddl2.dmctl.1715844110.31275.out
cov.shardddl2.dmctl.1715844116.31438.out
cov.shardddl2.dmctl.1715844116.31485.out
cov.shardddl2.dmctl.1715844117.31527.out
cov.shardddl2.dmctl.1715844117.31631.out
cov.shardddl2.dmctl.1715844117.31780.out
cov.shardddl2.dmctl.1715844118.31820.out
cov.shardddl2.dmctl.1715844126.32083.out
cov.shardddl2.dmctl.1715844126.32142.out
cov.shardddl2.dmctl.1715844126.32190.out
cov.shardddl2.dmctl.1715844133.32360.out
cov.shardddl2.dmctl.1715844133.32406.out
cov.shardddl2.dmctl.1715844133.32447.out
cov.shardddl2.dmctl.1715844133.32549.out
cov.shardddl2.dmctl.1715844134.32693.out
cov.shardddl2.dmctl.1715844135.32739.out
cov.shardddl2.dmctl.1715844139.32852.out
cov.shardddl2.dmctl.1715844145.33058.out
cov.shardddl2.dmctl.1715844145.33109.out
cov.shardddl2.dmctl.1715844152.33269.out
cov.shardddl2.dmctl.1715844152.33314.out
cov.shardddl2.dmctl.1715844153.33356.out
cov.shardddl2.dmctl.1715844153.33459.out
cov.shardddl2.dmctl.1715844154.33609.out
cov.shardddl2.dmctl.1715844155.33648.out
cov.shardddl2.dmctl.1715844157.33755.out
cov.shardddl2.dmctl.1715844164.33965.out
cov.shardddl2.dmctl.1715844164.34013.out
cov.shardddl2.dmctl.1715844171.34184.out
cov.shardddl2.dmctl.1715844171.34229.out
cov.shardddl2.dmctl.1715844171.34269.out
cov.shardddl2.dmctl.1715844171.34375.out
cov.shardddl2.dmctl.1715844172.34522.out
cov.shardddl2.dmctl.1715844173.34563.out
cov.shardddl2.dmctl.1715844175.34682.out
cov.shardddl2.dmctl.1715844175.34731.out
cov.shardddl2.dmctl.1715844183.34931.out
cov.shardddl2.master.out
cov.shardddl2.worker.8262.1715844010.out
cov.shardddl2.worker.8262.1715844017.out
cov.shardddl2.worker.8263.1715844012.out
cov.shardddl2.worker.8263.1715844048.out
downstream
goroutines
shardddl1
shardddl1_1
shardddl2
sql_res.shardddl1.txt
sql_res.shardddl1_1.txt
sql_res.shardddl2.txt
tidb.toml
++ find /tmp/dm_test/ -type f -name '*.log'
+ tar -cvzf log-G07.tar.gz /tmp/dm_test/shardddl1_1/worker2/log/stdout.log /tmp/dm_test/shardddl1_1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715843934.log /tmp/dm_test/shardddl1_1/dmctl.1715843980.log /tmp/dm_test/shardddl1_1/dmctl.1715843986.log /tmp/dm_test/shardddl1_1/dmctl.1715843960.log /tmp/dm_test/shardddl1_1/dmctl.1715843995.log /tmp/dm_test/shardddl1_1/dmctl.1715843948.log /tmp/dm_test/shardddl1_1/dmctl.1715843884.log /tmp/dm_test/shardddl1_1/dmctl.1715843940.log /tmp/dm_test/shardddl1_1/dmctl.1715844002.log /tmp/dm_test/shardddl1_1/dmctl.1715843947.log /tmp/dm_test/shardddl1_1/dmctl.1715843992.log /tmp/dm_test/shardddl1_1/dmctl.1715843880.log /tmp/dm_test/shardddl1_1/dmctl.1715843955.log /tmp/dm_test/shardddl1_1/dmctl.1715843981.log /tmp/dm_test/shardddl1_1/dmctl.1715843998.log /tmp/dm_test/shardddl1_1/dmctl.1715843898.log /tmp/dm_test/shardddl1_1/dmctl.1715843984.log /tmp/dm_test/shardddl1_1/dmctl.1715843908.log /tmp/dm_test/shardddl1_1/dmctl.1715843983.log /tmp/dm_test/shardddl1_1/dmctl.1715843924.log /tmp/dm_test/shardddl1_1/dmctl.1715843966.log /tmp/dm_test/shardddl1_1/dmctl.1715843977.log /tmp/dm_test/shardddl1_1/worker1/log/stdout.log /tmp/dm_test/shardddl1_1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715843971.log /tmp/dm_test/shardddl1_1/dmctl.1715843903.log /tmp/dm_test/shardddl1_1/dmctl.1715843996.log /tmp/dm_test/shardddl1_1/dmctl.1715843911.log /tmp/dm_test/shardddl1_1/dmctl.1715843899.log /tmp/dm_test/shardddl1_1/dmctl.1715843915.log /tmp/dm_test/shardddl1_1/dmctl.1715843988.log /tmp/dm_test/shardddl1_1/dmctl.1715843987.log /tmp/dm_test/shardddl1_1/dmctl.1715843961.log /tmp/dm_test/shardddl1_1/dmctl.1715843973.log /tmp/dm_test/shardddl1_1/dmctl.1715843891.log /tmp/dm_test/shardddl1_1/dmctl.1715843956.log /tmp/dm_test/shardddl1_1/dmctl.1715843879.log /tmp/dm_test/shardddl1_1/dmctl.1715843918.log /tmp/dm_test/shardddl1_1/dmctl.1715843994.log /tmp/dm_test/shardddl1_1/dmctl.1715843901.log /tmp/dm_test/shardddl1_1/dmctl.1715843909.log /tmp/dm_test/shardddl1_1/dmctl.1715843970.log /tmp/dm_test/shardddl1_1/dmctl.1715843912.log /tmp/dm_test/shardddl1_1/dmctl.1715843932.log /tmp/dm_test/shardddl1_1/dmctl.1715843938.log /tmp/dm_test/shardddl1_1/dmctl.1715843923.log /tmp/dm_test/shardddl1_1/dmctl.1715843985.log /tmp/dm_test/shardddl1_1/dmctl.1715843993.log /tmp/dm_test/shardddl1_1/dmctl.1715843919.log /tmp/dm_test/shardddl1_1/dmctl.1715843953.log /tmp/dm_test/shardddl1_1/dmctl.1715843989.log /tmp/dm_test/shardddl1_1/dmctl.1715843962.log /tmp/dm_test/shardddl1_1/dmctl.1715843968.log /tmp/dm_test/shardddl1_1/master/log/dm-master.log /tmp/dm_test/shardddl1_1/master/log/stdout.log /tmp/dm_test/shardddl1_1/dmctl.1715843979.log /tmp/dm_test/shardddl1_1/dmctl.1715843967.log /tmp/dm_test/shardddl1_1/dmctl.1715843922.log /tmp/dm_test/shardddl1_1/dmctl.1715843906.log /tmp/dm_test/shardddl1_1/dmctl.1715843896.log /tmp/dm_test/shardddl1_1/dmctl.1715843950.log /tmp/dm_test/shardddl1_1/dmctl.1715843963.log /tmp/dm_test/shardddl1_1/dmctl.1715843890.log /tmp/dm_test/shardddl1_1/dmctl.1715843945.log /tmp/dm_test/shardddl1_1/dmctl.1715843974.log /tmp/dm_test/shardddl1_1/dmctl.1715843916.log /tmp/dm_test/shardddl1_1/dmctl.1715843931.log /tmp/dm_test/shardddl1_1/dmctl.1715843942.log /tmp/dm_test/shardddl1_1/dmctl.1715843926.log /tmp/dm_test/shardddl1_1/dmctl.1715843999.log /tmp/dm_test/shardddl1_1/dmctl.1715843997.log /tmp/dm_test/shardddl1_1/dmctl.1715843982.log /tmp/dm_test/shardddl1_1/sync_diff_stdout.log /tmp/dm_test/shardddl1_1/dmctl.1715843885.log 
/tmp/dm_test/shardddl1_1/dmctl.1715843904.log /tmp/dm_test/shardddl1_1/dmctl.1715843914.log /tmp/dm_test/shardddl1_1/dmctl.1715843881.log /tmp/dm_test/shardddl1_1/dmctl.1715843917.log /tmp/dm_test/shardddl1_1/dmctl.1715843944.log /tmp/dm_test/shardddl1_1/dmctl.1715843928.log /tmp/dm_test/shardddl1_1/dmctl.1715843958.log /tmp/dm_test/shardddl1_1/dmctl.1715843943.log /tmp/dm_test/shardddl1_1/dmctl.1715843936.log /tmp/dm_test/shardddl1_1/dmctl.1715843937.log /tmp/dm_test/shardddl1_1/dmctl.1715843929.log /tmp/dm_test/shardddl1_1/dmctl.1715843878.log /tmp/dm_test/shardddl1_1/dmctl.1715843964.log /tmp/dm_test/shardddl1_1/dmctl.1715843893.log /tmp/dm_test/shardddl1_1/dmctl.1715843965.log /tmp/dm_test/shardddl1_1/dmctl.1715843952.log /tmp/dm_test/shardddl1_1/dmctl.1715843888.log /tmp/dm_test/shardddl1_1/dmctl.1715843886.log /tmp/dm_test/shardddl1_1/dmctl.1715843894.log /tmp/dm_test/shardddl2/dmctl.1715844134.log /tmp/dm_test/shardddl2/dmctl.1715844028.log /tmp/dm_test/shardddl2/worker2/log/stdout.log /tmp/dm_test/shardddl2/worker2/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715844152.log /tmp/dm_test/shardddl2/dmctl.1715844126.log /tmp/dm_test/shardddl2/dmctl.1715844099.log /tmp/dm_test/shardddl2/dmctl.1715844157.log /tmp/dm_test/shardddl2/dmctl.1715844066.log /tmp/dm_test/shardddl2/dmctl.1715844183.log /tmp/dm_test/shardddl2/dmctl.1715844020.log /tmp/dm_test/shardddl2/dmctl.1715844155.log /tmp/dm_test/shardddl2/worker1/log/stdout.log /tmp/dm_test/shardddl2/worker1/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715844175.log /tmp/dm_test/shardddl2/dmctl.1715844173.log /tmp/dm_test/shardddl2/dmctl.1715844058.log /tmp/dm_test/shardddl2/dmctl.1715844062.log /tmp/dm_test/shardddl2/dmctl.1715844015.log /tmp/dm_test/shardddl2/dmctl.1715844145.log /tmp/dm_test/shardddl2/dmctl.1715844033.log /tmp/dm_test/shardddl2/dmctl.1715844046.log /tmp/dm_test/shardddl2/dmctl.1715844116.log /tmp/dm_test/shardddl2/dmctl.1715844031.log /tmp/dm_test/shardddl2/dmctl.1715844172.log /tmp/dm_test/shardddl2/dmctl.1715844098.log /tmp/dm_test/shardddl2/master/log/dm-master.log /tmp/dm_test/shardddl2/master/log/stdout.log /tmp/dm_test/shardddl2/dmctl.1715844135.log /tmp/dm_test/shardddl2/dmctl.1715844051.log /tmp/dm_test/shardddl2/dmctl.1715844100.log /tmp/dm_test/shardddl2/dmctl.1715844089.log /tmp/dm_test/shardddl2/dmctl.1715844067.log /tmp/dm_test/shardddl2/dmctl.1715844013.log /tmp/dm_test/shardddl2/dmctl.1715844154.log /tmp/dm_test/shardddl2/dmctl.1715844118.log /tmp/dm_test/shardddl2/dmctl.1715844171.log /tmp/dm_test/shardddl2/sync_diff_stdout.log /tmp/dm_test/shardddl2/dmctl.1715844091.log /tmp/dm_test/shardddl2/dmctl.1715844153.log /tmp/dm_test/shardddl2/dmctl.1715844139.log /tmp/dm_test/shardddl2/dmctl.1715844014.log /tmp/dm_test/shardddl2/dmctl.1715844035.log /tmp/dm_test/shardddl2/dmctl.1715844117.log /tmp/dm_test/shardddl2/dmctl.1715844044.log /tmp/dm_test/shardddl2/dmctl.1715844133.log /tmp/dm_test/shardddl2/dmctl.1715844164.log /tmp/dm_test/shardddl2/dmctl.1715844087.log /tmp/dm_test/shardddl2/dmctl.1715844011.log /tmp/dm_test/shardddl2/dmctl.1715844110.log /tmp/dm_test/goroutines/stack/log/worker-8263.log /tmp/dm_test/goroutines/stack/log/worker-8264.log /tmp/dm_test/goroutines/stack/log/master-8261.log /tmp/dm_test/goroutines/stack/log/worker-18262.log /tmp/dm_test/goroutines/stack/log/master-8361.log /tmp/dm_test/goroutines/stack/log/master-8761.log /tmp/dm_test/goroutines/stack/log/worker-18263.log /tmp/dm_test/goroutines/stack/log/master-8561.log /tmp/dm_test/goroutines/stack/log/master-8461.log 
/tmp/dm_test/goroutines/stack/log/worker-8262.log /tmp/dm_test/goroutines/stack/log/master-8661.log /tmp/dm_test/downstream/tidb/log/tidb.log /tmp/dm_test/shardddl1/dmctl.1715843708.log /tmp/dm_test/shardddl1/dmctl.1715843805.log /tmp/dm_test/shardddl1/worker2/log/stdout.log /tmp/dm_test/shardddl1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715843784.log /tmp/dm_test/shardddl1/dmctl.1715843743.log /tmp/dm_test/shardddl1/dmctl.1715843721.log /tmp/dm_test/shardddl1/dmctl.1715843816.log /tmp/dm_test/shardddl1/dmctl.1715843787.log /tmp/dm_test/shardddl1/dmctl.1715843807.log /tmp/dm_test/shardddl1/dmctl.1715843611.log /tmp/dm_test/shardddl1/dmctl.1715843862.log /tmp/dm_test/shardddl1/dmctl.1715843847.log /tmp/dm_test/shardddl1/dmctl.1715843799.log /tmp/dm_test/shardddl1/dmctl.1715843706.log /tmp/dm_test/shardddl1/dmctl.1715843605.log /tmp/dm_test/shardddl1/dmctl.1715843830.log /tmp/dm_test/shardddl1/dmctl.1715843853.log /tmp/dm_test/shardddl1/dmctl.1715843742.log /tmp/dm_test/shardddl1/dmctl.1715843846.log /tmp/dm_test/shardddl1/dmctl.1715843837.log /tmp/dm_test/shardddl1/dmctl.1715843852.log /tmp/dm_test/shardddl1/dmctl.1715843738.log /tmp/dm_test/shardddl1/dmctl.1715843719.log /tmp/dm_test/shardddl1/dmctl.1715843701.log /tmp/dm_test/shardddl1/dmctl.1715843699.log /tmp/dm_test/shardddl1/dmctl.1715843744.log /tmp/dm_test/shardddl1/worker1/log/stdout.log /tmp/dm_test/shardddl1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715843795.log /tmp/dm_test/shardddl1/dmctl.1715843736.log /tmp/dm_test/shardddl1/dmctl.1715843725.log /tmp/dm_test/shardddl1/dmctl.1715843823.log /tmp/dm_test/shardddl1/dmctl.1715843640.log /tmp/dm_test/shardddl1/dmctl.1715843716.log /tmp/dm_test/shardddl1/dmctl.1715843849.log /tmp/dm_test/shardddl1/dmctl.1715843702.log /tmp/dm_test/shardddl1/dmctl.1715843829.log /tmp/dm_test/shardddl1/dmctl.1715843828.log /tmp/dm_test/shardddl1/dmctl.1715843726.log /tmp/dm_test/shardddl1/dmctl.1715843863.log /tmp/dm_test/shardddl1/dmctl.1715843728.log /tmp/dm_test/shardddl1/dmctl.1715843723.log /tmp/dm_test/shardddl1/dmctl.1715843707.log /tmp/dm_test/shardddl1/master/log/dm-master.log /tmp/dm_test/shardddl1/master/log/stdout.log /tmp/dm_test/shardddl1/dmctl.1715843700.log /tmp/dm_test/shardddl1/dmctl.1715843806.log /tmp/dm_test/shardddl1/dmctl.1715843868.log /tmp/dm_test/shardddl1/dmctl.1715843817.log /tmp/dm_test/shardddl1/dmctl.1715843737.log /tmp/dm_test/shardddl1/dmctl.1715843840.log /tmp/dm_test/shardddl1/dmctl.1715843835.log /tmp/dm_test/shardddl1/dmctl.1715843796.log /tmp/dm_test/shardddl1/dmctl.1715843824.log /tmp/dm_test/shardddl1/dmctl.1715843794.log /tmp/dm_test/shardddl1/dmctl.1715843607.log /tmp/dm_test/shardddl1/dmctl.1715843865.log /tmp/dm_test/shardddl1/dmctl.1715843867.log /tmp/dm_test/shardddl1/dmctl.1715843825.log /tmp/dm_test/shardddl1/dmctl.1715843694.log /tmp/dm_test/shardddl1/sync_diff_stdout.log /tmp/dm_test/shardddl1/dmctl.1715843704.log /tmp/dm_test/shardddl1/dmctl.1715843850.log /tmp/dm_test/shardddl1/dmctl.1715843788.log /tmp/dm_test/shardddl1/dmctl.1715843677.log /tmp/dm_test/shardddl1/dmctl.1715843860.log /tmp/dm_test/shardddl1/dmctl.1715843785.log /tmp/dm_test/shardddl1/dmctl.1715843612.log /tmp/dm_test/shardddl1/dmctl.1715843842.log /tmp/dm_test/shardddl1/dmctl.1715843672.log /tmp/dm_test/shardddl1/dmctl.1715843818.log /tmp/dm_test/shardddl1/dmctl.1715843833.log /tmp/dm_test/shardddl1/dmctl.1715843731.log /tmp/dm_test/shardddl1/dmctl.1715843635.log /tmp/dm_test/shardddl1/dmctl.1715843848.log /tmp/dm_test/shardddl1/dmctl.1715843675.log 
/tmp/dm_test/shardddl1/dmctl.1715843638.log /tmp/dm_test/shardddl1/dmctl.1715843748.log
tar: Removing leading `/' from member names
/tmp/dm_test/shardddl1_1/worker2/log/stdout.log
/tmp/dm_test/shardddl1_1/worker2/log/dm-worker.log
/tmp/dm_test/shardddl1_1/dmctl.1715843934.log
/tmp/dm_test/shardddl1_1/dmctl.1715843980.log
/tmp/dm_test/shardddl1_1/dmctl.1715843986.log
/tmp/dm_test/shardddl1_1/dmctl.1715843960.log
/tmp/dm_test/shardddl1_1/dmctl.1715843995.log
/tmp/dm_test/shardddl1_1/dmctl.1715843948.log
/tmp/dm_test/shardddl1_1/dmctl.1715843884.log
/tmp/dm_test/shardddl1_1/dmctl.1715843940.log
/tmp/dm_test/shardddl1_1/dmctl.1715844002.log
/tmp/dm_test/shardddl1_1/dmctl.1715843947.log
/tmp/dm_test/shardddl1_1/dmctl.1715843992.log
/tmp/dm_test/shardddl1_1/dmctl.1715843880.log
/tmp/dm_test/shardddl1_1/dmctl.1715843955.log
/tmp/dm_test/shardddl1_1/dmctl.1715843981.log
/tmp/dm_test/shardddl1_1/dmctl.1715843998.log
/tmp/dm_test/shardddl1_1/dmctl.1715843898.log
/tmp/dm_test/shardddl1_1/dmctl.1715843984.log
/tmp/dm_test/shardddl1_1/dmctl.1715843908.log
/tmp/dm_test/shardddl1_1/dmctl.1715843983.log
/tmp/dm_test/shardddl1_1/dmctl.1715843924.log
/tmp/dm_test/shardddl1_1/dmctl.1715843966.log
/tmp/dm_test/shardddl1_1/dmctl.1715843977.log
/tmp/dm_test/shardddl1_1/worker1/log/stdout.log
/tmp/dm_test/shardddl1_1/worker1/log/dm-worker.log
/tmp/dm_test/shardddl1_1/dmctl.1715843971.log
/tmp/dm_test/shardddl1_1/dmctl.1715843903.log
/tmp/dm_test/shardddl1_1/dmctl.1715843996.log
/tmp/dm_test/shardddl1_1/dmctl.1715843911.log
/tmp/dm_test/shardddl1_1/dmctl.1715843899.log
/tmp/dm_test/shardddl1_1/dmctl.1715843915.log
/tmp/dm_test/shardddl1_1/dmctl.1715843988.log
/tmp/dm_test/shardddl1_1/dmctl.1715843987.log
/tmp/dm_test/shardddl1_1/dmctl.1715843961.log
/tmp/dm_test/shardddl1_1/dmctl.1715843973.log
/tmp/dm_test/shardddl1_1/dmctl.1715843891.log
/tmp/dm_test/shardddl1_1/dmctl.1715843956.log
/tmp/dm_test/shardddl1_1/dmctl.1715843879.log
/tmp/dm_test/shardddl1_1/dmctl.1715843918.log
/tmp/dm_test/shardddl1_1/dmctl.1715843994.log
/tmp/dm_test/shardddl1_1/dmctl.1715843901.log
/tmp/dm_test/shardddl1_1/dmctl.1715843909.log
/tmp/dm_test/shardddl1_1/dmctl.1715843970.log
/tmp/dm_test/shardddl1_1/dmctl.1715843912.log
/tmp/dm_test/shardddl1_1/dmctl.1715843932.log
/tmp/dm_test/shardddl1_1/dmctl.1715843938.log
/tmp/dm_test/shardddl1_1/dmctl.1715843923.log
/tmp/dm_test/shardddl1_1/dmctl.1715843985.log
/tmp/dm_test/shardddl1_1/dmctl.1715843993.log
/tmp/dm_test/shardddl1_1/dmctl.1715843919.log
/tmp/dm_test/shardddl1_1/dmctl.1715843953.log
/tmp/dm_test/shardddl1_1/dmctl.1715843989.log
/tmp/dm_test/shardddl1_1/dmctl.1715843962.log
/tmp/dm_test/shardddl1_1/dmctl.1715843968.log
/tmp/dm_test/shardddl1_1/master/log/dm-master.log
/tmp/dm_test/shardddl1_1/master/log/stdout.log
/tmp/dm_test/shardddl1_1/dmctl.1715843979.log
/tmp/dm_test/shardddl1_1/dmctl.1715843967.log
/tmp/dm_test/shardddl1_1/dmctl.1715843922.log
/tmp/dm_test/shardddl1_1/dmctl.1715843906.log
/tmp/dm_test/shardddl1_1/dmctl.1715843896.log
/tmp/dm_test/shardddl1_1/dmctl.1715843950.log
/tmp/dm_test/shardddl1_1/dmctl.1715843963.log
/tmp/dm_test/shardddl1_1/dmctl.1715843890.log
/tmp/dm_test/shardddl1_1/dmctl.1715843945.log
/tmp/dm_test/shardddl1_1/dmctl.1715843974.log
/tmp/dm_test/shardddl1_1/dmctl.1715843916.log
/tmp/dm_test/shardddl1_1/dmctl.1715843931.log
/tmp/dm_test/shardddl1_1/dmctl.1715843942.log
/tmp/dm_test/shardddl1_1/dmctl.1715843926.log
/tmp/dm_test/shardddl1_1/dmctl.1715843999.log
/tmp/dm_test/shardddl1_1/dmctl.1715843997.log
/tmp/dm_test/shardddl1_1/dmctl.1715843982.log
/tmp/dm_test/shardddl1_1/sync_diff_stdout.log
/tmp/dm_test/shardddl1_1/dmctl.1715843885.log
/tmp/dm_test/shardddl1_1/dmctl.1715843904.log
/tmp/dm_test/shardddl1_1/dmctl.1715843914.log
/tmp/dm_test/shardddl1_1/dmctl.1715843881.log
/tmp/dm_test/shardddl1_1/dmctl.1715843917.log
/tmp/dm_test/shardddl1_1/dmctl.1715843944.log
/tmp/dm_test/shardddl1_1/dmctl.1715843928.log
/tmp/dm_test/shardddl1_1/dmctl.1715843958.log
/tmp/dm_test/shardddl1_1/dmctl.1715843943.log
/tmp/dm_test/shardddl1_1/dmctl.1715843936.log
/tmp/dm_test/shardddl1_1/dmctl.1715843937.log
/tmp/dm_test/shardddl1_1/dmctl.1715843929.log
/tmp/dm_test/shardddl1_1/dmctl.1715843878.log
/tmp/dm_test/shardddl1_1/dmctl.1715843964.log
/tmp/dm_test/shardddl1_1/dmctl.1715843893.log
/tmp/dm_test/shardddl1_1/dmctl.1715843965.log
/tmp/dm_test/shardddl1_1/dmctl.1715843952.log
/tmp/dm_test/shardddl1_1/dmctl.1715843888.log
/tmp/dm_test/shardddl1_1/dmctl.1715843886.log
/tmp/dm_test/shardddl1_1/dmctl.1715843894.log
/tmp/dm_test/shardddl2/dmctl.1715844134.log
/tmp/dm_test/shardddl2/dmctl.1715844028.log
/tmp/dm_test/shardddl2/worker2/log/stdout.log
/tmp/dm_test/shardddl2/worker2/log/dm-worker.log
/tmp/dm_test/shardddl2/dmctl.1715844152.log
/tmp/dm_test/shardddl2/dmctl.1715844126.log
/tmp/dm_test/shardddl2/dmctl.1715844099.log
/tmp/dm_test/shardddl2/dmctl.1715844157.log
/tmp/dm_test/shardddl2/dmctl.1715844066.log
/tmp/dm_test/shardddl2/dmctl.1715844183.log
/tmp/dm_test/shardddl2/dmctl.1715844020.log
/tmp/dm_test/shardddl2/dmctl.1715844155.log
/tmp/dm_test/shardddl2/worker1/log/stdout.log
/tmp/dm_test/shardddl2/worker1/log/dm-worker.log
/tmp/dm_test/shardddl2/dmctl.1715844175.log
/tmp/dm_test/shardddl2/dmctl.1715844173.log
/tmp/dm_test/shardddl2/dmctl.1715844058.log
/tmp/dm_test/shardddl2/dmctl.1715844062.log
/tmp/dm_test/shardddl2/dmctl.1715844015.log
/tmp/dm_test/shardddl2/dmctl.1715844145.log
/tmp/dm_test/shardddl2/dmctl.1715844033.log
/tmp/dm_test/shardddl2/dmctl.1715844046.log
/tmp/dm_test/shardddl2/dmctl.1715844116.log
/tmp/dm_test/shardddl2/dmctl.1715844031.log
/tmp/dm_test/shardddl2/dmctl.1715844172.log
/tmp/dm_test/shardddl2/dmctl.1715844098.log
/tmp/dm_test/shardddl2/master/log/dm-master.log
/tmp/dm_test/shardddl2/master/log/stdout.log
/tmp/dm_test/shardddl2/dmctl.1715844135.log
/tmp/dm_test/shardddl2/dmctl.1715844051.log
/tmp/dm_test/shardddl2/dmctl.1715844100.log
/tmp/dm_test/shardddl2/dmctl.1715844089.log
/tmp/dm_test/shardddl2/dmctl.1715844067.log
/tmp/dm_test/shardddl2/dmctl.1715844013.log
/tmp/dm_test/shardddl2/dmctl.1715844154.log
/tmp/dm_test/shardddl2/dmctl.1715844118.log
/tmp/dm_test/shardddl2/dmctl.1715844171.log
/tmp/dm_test/shardddl2/sync_diff_stdout.log
/tmp/dm_test/shardddl2/dmctl.1715844091.log
/tmp/dm_test/shardddl2/dmctl.1715844153.log
/tmp/dm_test/shardddl2/dmctl.1715844139.log
/tmp/dm_test/shardddl2/dmctl.1715844014.log
/tmp/dm_test/shardddl2/dmctl.1715844035.log
/tmp/dm_test/shardddl2/dmctl.1715844117.log
/tmp/dm_test/shardddl2/dmctl.1715844044.log
/tmp/dm_test/shardddl2/dmctl.1715844133.log
/tmp/dm_test/shardddl2/dmctl.1715844164.log
/tmp/dm_test/shardddl2/dmctl.1715844087.log
/tmp/dm_test/shardddl2/dmctl.1715844011.log
/tmp/dm_test/shardddl2/dmctl.1715844110.log
/tmp/dm_test/goroutines/stack/log/worker-8263.log
/tmp/dm_test/goroutines/stack/log/worker-8264.log
/tmp/dm_test/goroutines/stack/log/master-8261.log
/tmp/dm_test/goroutines/stack/log/worker-18262.log
/tmp/dm_test/goroutines/stack/log/master-8361.log
/tmp/dm_test/goroutines/stack/log/master-8761.log
/tmp/dm_test/goroutines/stack/log/worker-18263.log
/tmp/dm_test/goroutines/stack/log/master-8561.log
/tmp/dm_test/goroutines/stack/log/master-8461.log
/tmp/dm_test/goroutines/stack/log/worker-8262.log
/tmp/dm_test/goroutines/stack/log/master-8661.log
/tmp/dm_test/downstream/tidb/log/tidb.log
/tmp/dm_test/shardddl1/dmctl.1715843708.log
/tmp/dm_test/shardddl1/dmctl.1715843805.log
/tmp/dm_test/shardddl1/worker2/log/stdout.log
/tmp/dm_test/shardddl1/worker2/log/dm-worker.log
/tmp/dm_test/shardddl1/dmctl.1715843784.log
/tmp/dm_test/shardddl1/dmctl.1715843743.log
/tmp/dm_test/shardddl1/dmctl.1715843721.log
/tmp/dm_test/shardddl1/dmctl.1715843816.log
/tmp/dm_test/shardddl1/dmctl.1715843787.log
/tmp/dm_test/shardddl1/dmctl.1715843807.log
/tmp/dm_test/shardddl1/dmctl.1715843611.log
/tmp/dm_test/shardddl1/dmctl.1715843862.log
/tmp/dm_test/shardddl1/dmctl.1715843847.log
/tmp/dm_test/shardddl1/dmctl.1715843799.log
/tmp/dm_test/shardddl1/dmctl.1715843706.log
/tmp/dm_test/shardddl1/dmctl.1715843605.log
/tmp/dm_test/shardddl1/dmctl.1715843830.log
/tmp/dm_test/shardddl1/dmctl.1715843853.log
/tmp/dm_test/shardddl1/dmctl.1715843742.log
/tmp/dm_test/shardddl1/dmctl.1715843846.log
/tmp/dm_test/shardddl1/dmctl.1715843837.log
/tmp/dm_test/shardddl1/dmctl.1715843852.log
/tmp/dm_test/shardddl1/dmctl.1715843738.log
/tmp/dm_test/shardddl1/dmctl.1715843719.log
/tmp/dm_test/shardddl1/dmctl.1715843701.log
/tmp/dm_test/shardddl1/dmctl.1715843699.log
/tmp/dm_test/shardddl1/dmctl.1715843744.log
/tmp/dm_test/shardddl1/worker1/log/stdout.log
/tmp/dm_test/shardddl1/worker1/log/dm-worker.log
/tmp/dm_test/shardddl1/dmctl.1715843795.log
/tmp/dm_test/shardddl1/dmctl.1715843736.log
/tmp/dm_test/shardddl1/dmctl.1715843725.log
/tmp/dm_test/shardddl1/dmctl.1715843823.log
/tmp/dm_test/shardddl1/dmctl.1715843640.log
/tmp/dm_test/shardddl1/dmctl.1715843716.log
/tmp/dm_test/shardddl1/dmctl.1715843849.log
/tmp/dm_test/shardddl1/dmctl.1715843702.log
/tmp/dm_test/shardddl1/dmctl.1715843829.log
/tmp/dm_test/shardddl1/dmctl.1715843828.log
/tmp/dm_test/shardddl1/dmctl.1715843726.log
/tmp/dm_test/shardddl1/dmctl.1715843863.log
/tmp/dm_test/shardddl1/dmctl.1715843728.log
/tmp/dm_test/shardddl1/dmctl.1715843723.log
/tmp/dm_test/shardddl1/dmctl.1715843707.log
/tmp/dm_test/shardddl1/master/log/dm-master.log
/tmp/dm_test/shardddl1/master/log/stdout.log
/tmp/dm_test/shardddl1/dmctl.1715843700.log
/tmp/dm_test/shardddl1/dmctl.1715843806.log
/tmp/dm_test/shardddl1/dmctl.1715843868.log
/tmp/dm_test/shardddl1/dmctl.1715843817.log
/tmp/dm_test/shardddl1/dmctl.1715843737.log
/tmp/dm_test/shardddl1/dmctl.1715843840.log
/tmp/dm_test/shardddl1/dmctl.1715843835.log
/tmp/dm_test/shardddl1/dmctl.1715843796.log
/tmp/dm_test/shardddl1/dmctl.1715843824.log
/tmp/dm_test/shardddl1/dmctl.1715843794.log
/tmp/dm_test/shardddl1/dmctl.1715843607.log
/tmp/dm_test/shardddl1/dmctl.1715843865.log
/tmp/dm_test/shardddl1/dmctl.1715843867.log
/tmp/dm_test/shardddl1/dmctl.1715843825.log
/tmp/dm_test/shardddl1/dmctl.1715843694.log
/tmp/dm_test/shardddl1/sync_diff_stdout.log
/tmp/dm_test/shardddl1/dmctl.1715843704.log
/tmp/dm_test/shardddl1/dmctl.1715843850.log
/tmp/dm_test/shardddl1/dmctl.1715843788.log
/tmp/dm_test/shardddl1/dmctl.1715843677.log
/tmp/dm_test/shardddl1/dmctl.1715843860.log
/tmp/dm_test/shardddl1/dmctl.1715843785.log
/tmp/dm_test/shardddl1/dmctl.1715843612.log
/tmp/dm_test/shardddl1/dmctl.1715843842.log
/tmp/dm_test/shardddl1/dmctl.1715843672.log
/tmp/dm_test/shardddl1/dmctl.1715843818.log
/tmp/dm_test/shardddl1/dmctl.1715843833.log
/tmp/dm_test/shardddl1/dmctl.1715843731.log
/tmp/dm_test/shardddl1/dmctl.1715843635.log
/tmp/dm_test/shardddl1/dmctl.1715843848.log
/tmp/dm_test/shardddl1/dmctl.1715843675.log
/tmp/dm_test/shardddl1/dmctl.1715843638.log
/tmp/dm_test/shardddl1/dmctl.1715843748.log
+ ls -alh log-G07.tar.gz
-rw-r--r-- 1 jenkins jenkins 672K May 16 15:23 log-G07.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "list-member --name worker2 --name worker1"
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
got=1 expected=1
got=1 expected=1
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
kill dm-worker1
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
Sending interrupt signal to process
Killing processes
rpc addr 127.0.0.1:8261 is alive
check log contain failed 1-th time, retry later
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
kill finished with exit code 0
[Pipeline] }
Cache not saved (inner-step execution failed)
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
[Pipeline] // cache
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
[Pipeline] // cache
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // withEnv
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 2
Finished: FAILURE