Console Output

[... skipping 952 KB of earlier output ...]
wait for rpc addr 127.0.0.1:18263 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:54:43 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
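
Note: the repeated "wait for rpc addr ... alive the N-th time" lines come from the suite's retry helper, which polls a just-started master/worker port until it accepts connections. A minimal sketch of such a helper, assuming bash with netcat; the function name and the 10-attempt budget are illustrative, not taken from this log:

    # Illustrative helper: poll a TCP endpoint until it accepts connections.
    wait_rpc_alive() {
        local addr=$1 retry=0 max=10
        until nc -z "${addr%:*}" "${addr#*:}"; do
            retry=$((retry + 1))
            if [ "$retry" -ge "$max" ]; then
                echo "rpc addr $addr not alive after $max attempts"
                return 1
            fi
            echo "wait for rpc addr $addr alive the $retry-th time"
            sleep 1
        done
        echo "rpc addr $addr is alive"
    }
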
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "list-member --name worker1 --name worker2"
got=2 expected=2
start worker3
[Mon May 13 16:54:43 CST 2024] <<<<<< START DM-WORKER on port 8264, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker3.toml >>>>>>
wait for rpc addr 127.0.0.1:8264 alive the 1-th time
rpc addr 127.0.0.1:18263 is alive
try to kill worker port 8262
dmctl test cmd: "query-status test"
got=2 expected=2
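
Note: the "got=N expected=N" lines are the harness counting how many times a pattern occurs in dmctl's output and comparing that against an expected count. A hedged sketch of that check; the helper name and arguments are invented for illustration:

    # Illustrative check: count pattern occurrences in command output.
    check_count() {
        local output=$1 pattern=$2 expected=$3
        local got
        got=$(printf '%s\n' "$output" | grep -c "$pattern" || true)
        echo "got=$got expected=$expected"
        [ "$got" -eq "$expected" ]
    }

    # Example usage (master address as used throughout this log):
    check_count "$(dmctl --master-addr 127.0.0.1:8261 query-status test)" \
        '"stage": "Running"' 2
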
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl2/source1.yaml"
[Mon May 13 16:54:44 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8264 is alive
start worker4
[Mon May 13 16:54:44 CST 2024] <<<<<< START DM-WORKER on port 18262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker4.toml >>>>>>
[Mon May 13 16:54:45 CST 2024] <<<<<< finish DM-138 optimistic >>>>>>
wait process dm-worker1 exit...
wait for rpc addr 127.0.0.1:18262 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl2/source2.yaml"
wait process dm-worker1 exit...
process dm-worker1 already exit
worker1 was killed
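
Note: "try to kill worker port 8262" ... "worker1 was killed" is the HA test forcing a failover by killing whichever process owns a given port, then waiting for it to exit. One common way to implement such a step (illustrative; the suite's actual helper may differ):

    # Illustrative: kill the process listening on a port and wait for exit.
    kill_worker_by_port() {
        local port=$1 pids pid
        pids=$(lsof -ti tcp:"$port") || return 0   # nothing listening
        kill $pids
        for pid in $pids; do
            while kill -0 "$pid" 2>/dev/null; do
                echo "wait process on port $port exit..."
                sleep 1
            done
        done
    }
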
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:18262 is alive
dmctl test cmd: "list-member --name worker3 --name worker4"
got=1 expected=1
got=1 expected=1
got=2 expected=2
dmctl test cmd: "query-status test2"
dmctl test cmd: "start-relay -s mysql-replica-01 worker3"
[Mon May 13 16:54:46 CST 2024] <<<<<< start DM-139 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
got=2 expected=2
[Mon May 13 16:54:46 CST 2024] <<<<<< start DM-DROP_COLUMN_EXEC_ERROR optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status t-Ë!s`t"
dmctl test cmd: "query-status test"
got=2 expected=2
got=2 expected=2
dmctl test cmd: "start-relay -s mysql-replica-02 worker4"
check diff failed 1-th time, retry later
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task t-Ë!s`t"
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker 1
got=2 expected=2
dmctl test cmd: "query-status test"
got=4 expected=4
check diff successfully
kill dm-worker3
wait process dm-worker1 exit...
wait process dm-worker3 exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:54:50 CST 2024] <<<<<< finish DM-139 pessimistic >>>>>>
[Mon May 13 16:54:50 CST 2024] <<<<<< start DM-139 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker1 exit...
process dm-worker1 already exit
[Mon May 13 16:54:50 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
check log contain failed 1-th time, retry later
wait process dm-worker3 exit...
process dm-worker3 already exit
kill dm-worker4
[Mon May 13 16:54:52 CST 2024] <<<<<< finish DM-139 optimistic >>>>>>
[Mon May 13 16:54:53 CST 2024] <<<<<< start DM-142 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker4 exit...
process dm-worker4 already exit
dmctl test cmd: "list-member --name worker3 --name worker4"
got=2 expected=2
start worker1
[Mon May 13 16:54:53 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=1 expected=1
restart dm-master
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8262 is alive
start worker2
[Mon May 13 16:54:54 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff failed 1-th time, retry later
wait process dm-master exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "list-member --name worker1 --name worker2"
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
got=1 expected=1
got=1 expected=1
num1 1 num2 2
[Mon May 13 16:54:56 CST 2024] <<<<<< finish test_last_bound >>>>>>
[Mon May 13 16:54:56 CST 2024] <<<<<< start test_config_name >>>>>>
[Mon May 13 16:54:56 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join1.toml >>>>>>
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:54:56 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /tmp/dm_test/ha_cases/dm-master-join2.toml >>>>>>
wait process dm-master.test exit...
process dm-master.test already exit
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:54:57 CST 2024] <<<<<< finish DM-142 pessimistic >>>>>>
wait process dm-worker.test exit...
[Mon May 13 16:54:58 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check log contain failed 1-th time (file not exist), retry later
[Mon May 13 16:54:58 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /tmp/dm_test/ha_cases/dm-master-join2.toml >>>>>>
[Mon May 13 16:54:58 CST 2024] <<<<<< start DM-143 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8361 is alive
[Mon May 13 16:54:58 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
rpc addr 127.0.0.1:8262 is alive
[Mon May 13 16:54:58 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /tmp/dm_test/ha_cases/dm-worker2.toml >>>>>>
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:55:00 CST 2024] <<<<<< finish test_query_timeout >>>>>>
[Mon May 13 16:55:00 CST 2024] <<<<<< start test_regexpr_router regexpr-task.yaml >>>>>>
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:55:00 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Mon May 13 16:55:00 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /tmp/dm_test/ha_cases/dm-worker2.toml >>>>>>
rpc addr 127.0.0.1:8263 is alive
[Mon May 13 16:55:00 CST 2024] <<<<<< finish test_config_name >>>>>>
[Mon May 13 16:55:00 CST 2024] <<<<<< start test_join_masters_and_worker >>>>>>
3 dm-master alive
3 dm-worker alive
[Mon May 13 16:55:00 CST 2024] <<<<<< finish DM-143 pessimistic >>>>>>
0 dm-syncer alive
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Mon May 13 16:55:01 CST 2024] <<<<<< start DM-145 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:55:01 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
restart dm-worker 1
rpc addr 127.0.0.1:8262 is alive
[Mon May 13 16:55:02 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
wait process dm-worker1 exit...
got=2 expected=2
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source2.yaml"
wait process dm-master.test exit...
wait process dm-worker1 exit...
process dm-worker1 already exit
[Mon May 13 16:55:03 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "query-status test"
dmctl test cmd: "start-task /tmp/dm_test/all_mode/regexpr-task.yaml --remove-meta"
dmctl test cmd: "start-task /tmp/dm_test/all_mode/regexpr-task.yaml --remove-meta"
got=3 expected=3
check diff failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
[Mon May 13 16:55:05 CST 2024] <<<<<< finish DM-145 pessimistic >>>>>>
[Mon May 13 16:55:05 CST 2024] <<<<<< start DM-145 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:07 CST 2024] <<<<<< finish DM-DROP_COLUMN_EXEC_ERROR optimistic >>>>>>
[Mon May 13 16:55:07 CST 2024] <<<<<< start DM-INIT_SCHEMA optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
[Mon May 13 16:55:07 CST 2024] <<<<<< finish DM-145 optimistic >>>>>>
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "query-status test"
got=2 expected=2
[Mon May 13 16:55:08 CST 2024] <<<<<< start DM-146 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
[Mon May 13 16:55:10 CST 2024] <<<<<< finish DM-146 pessimistic >>>>>>
[Mon May 13 16:55:10 CST 2024] <<<<<< start DM-146 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
restart dm-master
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:55:11 CST 2024] <<<<<< finish test_regexpr_router regexpr-task.yaml >>>>>>
[Mon May 13 16:55:11 CST 2024] <<<<<< start test_regexpr_router regexpr-task-lightning.yaml >>>>>>
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:55:11 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-master exit...
got=1 expected=1
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:12 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join1.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:55:12 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Mon May 13 16:55:12 CST 2024] <<<<<< finish DM-146 optimistic >>>>>>
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8261 is alive
query-status from unique master
dmctl test cmd: "query-status"
[Mon May 13 16:55:13 CST 2024] <<<<<< start DM-147 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
got=1 expected=1
[Mon May 13 16:55:13 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join2.toml >>>>>>
wait for rpc addr 127.0.0.1:8361 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Mon May 13 16:55:14 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source1.yaml"
dmctl test cmd: "query-status test"
[Mon May 13 16:55:15 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source2.yaml"
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 /tmp/dm_test/shardddl4_1/schema.sql -s mysql-replica-01"
dmctl test cmd: "binlog replace test "alter table shardddl1.tb1 drop column b""
got=2 expected=2
got=1 expected=1
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:16 CST 2024] <<<<<< finish DM-147 optimistic >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "start-task /tmp/dm_test/all_mode/regexpr-task-lightning.yaml --remove-meta"
dmctl test cmd: "start-task /tmp/dm_test/all_mode/regexpr-task-lightning.yaml --remove-meta"
[Mon May 13 16:55:17 CST 2024] <<<<<< start DM-148 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
check diff successfully
dmctl test cmd: "stop-task test"
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
[Mon May 13 16:55:18 CST 2024] <<<<<< finish DM-INIT_SCHEMA optimistic >>>>>>
[Mon May 13 16:55:18 CST 2024] <<<<<< start DM-DROP_COLUMN_ALL_DONE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master.test exit...
check diff failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker 2
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker2 exit...
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:21 CST 2024] <<<<<< finish DM-148 pessimistic >>>>>>
[Mon May 13 16:55:21 CST 2024] <<<<<< start DM-148 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker2 exit...
process dm-worker2 already exit
[Mon May 13 16:55:21 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "query-status test"
got=2 expected=2
check log contain failed 1-th time, retry later
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:55:23 CST 2024] <<<<<< finish test_regexpr_router regexpr-task-lightning.yaml >>>>>>
[Mon May 13 16:55:23 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-master.toml >>>>>>
check diff successfully
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Mon May 13 16:55:23 CST 2024] <<<<<< finish DM-148 optimistic >>>>>>
wait for rpc addr 127.0.0.1:8361 alive the 2-th time
rpc addr 127.0.0.1:8361 is alive
[Mon May 13 16:55:24 CST 2024] <<<<<< start DM-149 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=1 expected=1
restart dm-master
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:55:25 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master exit...
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source1.yaml"
wait process dm-master exit...
process dm-master already exit
[Mon May 13 16:55:27 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:28 CST 2024] <<<<<< finish DM-149 pessimistic >>>>>>
[Mon May 13 16:55:28 CST 2024] <<<<<< start DM-149 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source2.yaml"
[Mon May 13 16:55:28 CST 2024] <<<<<< START DM-MASTER on port 8461, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join3.toml >>>>>>
wait for rpc addr 127.0.0.1:8461 alive the 1-th time
[Mon May 13 16:55:29 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8461 is alive
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "start-task /tmp/dm_test/all_mode/dm-task.yaml --remove-meta"
check diff successfully
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Mon May 13 16:55:30 CST 2024] <<<<<< finish DM-149 optimistic >>>>>>
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
dmctl test cmd: "query-status t-Ë!s`t"
got=1 expected=1
dmctl test cmd: "stop-task /tmp/dm_test/all_mode/dm-task.yaml"
dmctl test cmd: "start-task /tmp/dm_test/all_mode/dm-task.yaml --remove-meta"
got=1 expected=1
got=1 expected=1
restart dm-worker 2
[Mon May 13 16:55:31 CST 2024] <<<<<< start DM-150 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker2 exit...
check diff successfully
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "show-ddl-locks"
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-worker2 exit...
process dm-worker2 already exit
[Mon May 13 16:55:33 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status -s mysql-replica-01"
dmctl test cmd: "pause-task t-Ë!s`t"
dmctl test cmd: "operate-schema set -s mysql-replica-01 t-Ë!s`t -d all_mode -t no_diff /tmp/dm_test/all_mode/schema.sql"
dmctl test cmd: "resume-task t-Ë!s`t"
wait pattern dm-worker1.toml exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
wait pattern dm-worker1.toml exit...
check diff failed 1-th time, retry later
wait pattern dm-worker1.toml exit...
[Mon May 13 16:55:34 CST 2024] <<<<<< START DM-MASTER on port 8561, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join4.toml >>>>>>
wait for rpc addr 127.0.0.1:8561 alive the 1-th time
wait pattern dm-worker1.toml exit...
wait pattern dm-worker1.toml exit...
wait pattern dm-worker1.toml exit...
rpc addr 127.0.0.1:8561 is alive
wait pattern dm-worker1.toml exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:36 CST 2024] <<<<<< finish DM-150 pessimistic >>>>>>
[Mon May 13 16:55:36 CST 2024] <<<<<< start DM-150 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait pattern dm-worker1.toml exit...
pattern dm-worker1.toml already exit
[Mon May 13 16:55:36 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff failed 2-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "query-status t-Ë!s`t"
got=1 expected=1
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait pattern dm-worker2.toml exit...
wait pattern dm-worker2.toml exit...
got=3 expected=3
dmctl test cmd: "stop-task test"
wait pattern dm-worker2.toml exit...
wait pattern dm-worker2.toml exit...
[Mon May 13 16:55:38 CST 2024] <<<<<< finish DM-150 optimistic >>>>>>
wait pattern dm-worker2.toml exit...
wait pattern dm-worker2.toml exit...
wait pattern dm-worker2.toml exit...
pattern dm-worker2.toml already exit
[Mon May 13 16:55:38 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/all_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Mon May 13 16:55:39 CST 2024] <<<<<< start DM-151 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:39 CST 2024] <<<<<< finish DM-DROP_COLUMN_ALL_DONE optimistic >>>>>>
[Mon May 13 16:55:39 CST 2024] <<<<<< start DM-RECOVER_LOCK optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
dmctl test cmd: "query-status test"
got=2 expected=2
got=2 expected=2
dmctl test cmd: "show-ddl-locks"
got=1 expected=1
check log contain failed 1-th time, retry later
[Mon May 13 16:55:41 CST 2024] <<<<<< START DM-MASTER on port 8661, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master-join5.toml >>>>>>
wait for rpc addr 127.0.0.1:8661 alive the 1-th time
rpc addr 127.0.0.1:8661 is alive
dmctl test cmd: "query-status"
got=1 expected=1
dmctl test cmd: "query-status"
got=1 expected=1
dmctl test cmd: "query-status"
got=1 expected=1
dmctl test cmd: "query-status"
check log contain failed 1-th time, retry later
got=1 expected=1
join worker with dm-master1 endpoint
[Mon May 13 16:55:43 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker-join2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "list-member --worker --name=worker2"
check diff successfully
dmctl test cmd: "stop-task test"
got=1 expected=1
kill dm-master-join1
[Mon May 13 16:55:44 CST 2024] <<<<<< finish DM-151 pessimistic >>>>>>
[Mon May 13 16:55:44 CST 2024] <<<<<< start DM-151 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
restart dm-master
wait process dm-master-join1 exit...
wait process dm-master exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=3 expected=3
wait process dm-master-join1 exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master-join1 exit...
process dm-master-join1 already exit
dmctl test cmd: "list-member --worker --name=worker2"
got=1 expected=1
join worker with 5 masters endpoint
[Mon May 13 16:55:47 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker-join1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8262 alive the 2-th time
check diff failed 1-th time, retry later
[Mon May 13 16:55:49 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8262 alive the 3-th time
wait for rpc addr 127.0.0.1:8262 alive the 4-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8262 is alive
query-status from master2
dmctl test cmd: "query-status"
rpc addr 127.0.0.1:8261 is alive
restart dm-master
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:55:51 CST 2024] <<<<<< finish DM-151 optimistic >>>>>>
got=1 expected=1
[Mon May 13 16:55:51 CST 2024] <<<<<< finish test_join_masters_and_worker >>>>>>
[Mon May 13 16:55:51 CST 2024] <<<<<< start test_standalone_running >>>>>>
4 dm-master alive
2 dm-worker alive
0 dm-syncer alive
after restart dm-worker, task should resume automatically
dmctl test cmd: "start-task /tmp/dm_test/all_mode/dm-task.yaml"
HTTP 127.0.0.1:8261/apis/v1alpha1/status/t-Ë!s`t is alive
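
Note: "after restart dm-worker, task should resume automatically" is the HA property under test here: once the worker rejoins the cluster, its subtask should return to Running without operator action. A hedged sketch of polling for that with dmctl (--master-addr is standard dmctl usage; the 30-second budget is illustrative):

    # Illustrative: wait until query-status reports the task Running again.
    for i in $(seq 1 30); do
        if dmctl --master-addr 127.0.0.1:8261 query-status test \
            | grep -q '"stage": "Running"'; then
            echo "task resumed automatically"
            break
        fi
        sleep 1
    done
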
[Mon May 13 16:55:52 CST 2024] <<<<<< start DM-152 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master exit...
wait process dm-master exit...
process dm-master already exit
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
got=2 expected=2
check diff failed 1-th time, retry later
wait process dm-master.test exit...
wait process tidb-server exit...
process tidb-server already exit
[Mon May 13 16:55:55 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
check diff successfully
check diff failed 1-th time, retry later
wait process dm-master.test exit...
Starting TiDB on port 4000
Verifying TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
check diff failed 1-th time, retry later
check diff successfully
wait process dm-master.test exit...
check diff failed 1-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'test'@'%'
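
Note: ERROR 1396 on CREATE USER usually just means the account survived a previous run on a reused TiDB instance. A hedged way to make that setup step idempotent, using standard MySQL/TiDB syntax (host and port here match the TiDB started above, but are illustrative):

    # Illustrative: tolerate a pre-existing account instead of failing.
    mysql -h 127.0.0.1 -P 4000 -u root \
        -e "CREATE USER IF NOT EXISTS 'test'@'%' IDENTIFIED BY ''"
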
wait process dm-master.test exit...
check diff successfully
check diff failed 1-th time, retry later
wait process dm-master.test exit...
check diff failed 1-th time, retry later
check diff successfully
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "stop-task test"
[Mon May 13 16:56:02 CST 2024] <<<<<< finish DM-RECOVER_LOCK optimistic >>>>>>
run DM_DropAddColumn case #0
[Mon May 13 16:56:02 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff failed 2-th time, retry later
check diff successfully
check diff failed 1-th time, retry later
check diff successfully
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
dmctl test cmd: "shard-ddl-lock"
[Mon May 13 16:56:05 CST 2024] <<<<<< finish DM-152 optimistic >>>>>>
got=1 expected=1
dmctl test cmd: "query-status test"
check diff successfully
dmctl test cmd: "pause-relay -s mysql-replica-01"
dmctl test cmd: "resume-relay -s mysql-replica-01"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
wait process dm-master.test exit...
check diff failed 1-th time, retry later
[Mon May 13 16:56:06 CST 2024] <<<<<< start DM-153 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
use sync_diff_inspector to check increment data
check diff successfully
check diff successfully
data checked after one worker was killed
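
Note: the "check diff" steps shell out to sync_diff_inspector to compare upstream and downstream data, retrying a few times because replication lags behind the writes. A minimal sketch of the kind of config such a check runs against, written the way the tests typically generate it; hosts, ports, output dir, and the table filter are illustrative:

    # Illustrative: generate a sync_diff_inspector config and run the check.
    cat > /tmp/diff-config.toml <<'EOF'
    check-thread-count = 4
    export-fix-sql = false

    [data-sources.mysql1]
    host = "127.0.0.1"
    port = 3306
    user = "root"

    [data-sources.tidb0]
    host = "127.0.0.1"
    port = 4000
    user = "root"

    [task]
    output-dir = "/tmp/diff_output"
    source-instances = ["mysql1"]
    target-instance = "tidb0"
    target-check-tables = ["ha_test.*"]
    EOF
    sync_diff_inspector --config /tmp/diff-config.toml
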
try to kill worker port 8263
wait process dm-worker2 exit...
wait process dm-worker2 exit...
process dm-worker2 already exit
worker2 was killed
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test2"
got=2 expected=2
[Mon May 13 16:56:05 CST 2024] <<<<<< finish test_multi_task_reduce_and_restart_worker >>>>>>
3 dm-master alive
3 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
check diff failed 2-th time, retry later
got=2 expected=2
dmctl test cmd: "query-status test"
relay logs dm-it-5282e9cf-b17e-45d0-b8a7-74faf6655098-40hw0-6rwd9-bin.000002
relay.meta
check diff successfully
check dump files have been cleaned
ls: cannot access /tmp/dm_test/all_mode/worker2/dumped_data.t-Ë!s`t: No such file or directory
worker2 auto removed dump files
check no password in log
dmctl test cmd: "query-status t-Ë!s`t"
got=1 expected=1
dmctl test cmd: "stop-task t-Ë!s`t"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 -s mysql-replica-01 --from-target"
dmctl test cmd: "binlog replace test "alter table shardddl1.tb1 drop column b""
matched
matched
[Mon May 13 16:56:08 CST 2024] <<<<<< start test_source_and_target_with_empty_gtid >>>>>>
1 dm-master alive
2 dm-worker alive
wait process dm-master.test exit...
got=2 expected=2
got=1 expected=1
check diff successfully
dmctl test cmd: "stop-task test"
0 dm-syncer alive
wait process dm-master.test exit...
[Mon May 13 16:56:09 CST 2024] <<<<<< finish DM-153 optimistic >>>>>>
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
[Mon May 13 16:56:10 CST 2024] <<<<<< start DM-154 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "binlog-schema update test shardddl1 tb1 -s mysql-replica-01 --from-source"
wait process dm-worker.test exit...
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
check diff successfully
wait process dm-master.test exit...
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
[Mon May 13 16:56:12 CST 2024] <<<<<< finish DM-154 optimistic >>>>>>
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
wait process dm-master.test exit...
[Mon May 13 16:56:13 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #1
[Mon May 13 16:56:13 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
[Mon May 13 16:56:13 CST 2024] <<<<<< start DM-155 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:56:14 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /tmp/dm_test/all_mode/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
restart dm-master
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
[Mon May 13 16:56:15 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /tmp/dm_test/all_mode/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/all_mode/source1.yaml"
wait process dm-master exit...
wait process dm-master.test exit...
got=2 expected=2
got=1 expected=1
check master alive
dmctl test cmd: "list-member"
restart master
restart dm-master
got=1 expected=1
gtid is empty
start task and check stage
dmctl test cmd: "start-task /tmp/dm_test/all_mode/dm-task-no-gtid.yaml --remove-meta=true"
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
wait process dm-master exit...
wait process dm-master.test exit...
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
got=1 expected=1
got=2 expected=2
check data
check diff successfully
ERROR 1146 (42S02) at line 1: Table 'all_mode.t2' doesn't exist
run tidb sql failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
[Mon May 13 16:56:18 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
wait process dm-master.test exit...
got=1 expected=1
check log contain failed 1-th time, retry later
[Mon May 13 16:56:20 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
check log contain failed 1-th time, retry later
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
wait process dm-master.test exit...
wait process dm-master.test exit...
restart master
restart dm-master
wait process dm-master.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
check diff failed 2-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
[Mon May 13 16:56:27 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:56:31 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #2
[Mon May 13 16:56:31 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master.test exit...
restart master
restart dm-master
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
wait process dm-master exit...
check diff successfully
restart dm-master
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
wait process dm-master.test exit...
[Mon May 13 16:56:36 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
[Mon May 13 16:56:37 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
wait process dm-master.test exit...
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
restart worker1
restart dm-worker1
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process worker1 exit...
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
check diff failed 2-th time, retry later
wait process worker1 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
wait process dm-master.test exit...
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
[Mon May 13 16:56:48 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #3
[Mon May 13 16:56:48 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process worker1 exit...
wait process dm-master.test exit...
wait process worker1 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
wait process worker1 exit...
process worker1 already exit
[Mon May 13 16:56:49 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
restart dm-master
dmctl test cmd: "query-status test"
restart worker2
restart dm-worker2
got=1 expected=1
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
wait process worker2 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process worker2 exit...
process worker2 already exit
[Mon May 13 16:56:53 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
rpc addr 127.0.0.1:8263 is alive
wait process dm-master.test exit...
[Mon May 13 16:56:55 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
restart worker1
restart dm-worker1
wait process dm-master.test exit...
import prepare data
start DM worker and master standalone cluster
[Mon May 13 16:56:56 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master1.toml >>>>>>
[Mon May 13 16:56:56 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master2.toml >>>>>>
[Mon May 13 16:56:56 CST 2024] <<<<<< START DM-MASTER on port 8461, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master3.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
wait process worker1 exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
wait process worker1 exit...
process worker1 already exit
[Mon May 13 16:56:57 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
rpc addr 127.0.0.1:8361 is alive
rpc addr 127.0.0.1:8461 is alive
[Mon May 13 16:56:58 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:8262 is alive
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
operate mysql config to worker
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source1.yaml"
restart worker2
restart dm-worker2
check diff failed 1-th time, retry later
start DM task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task.yaml "
wait process worker2 exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
use sync_diff_inspector to check full dump loader
check diff successfully
flush logs to force rotate binlog file
apply increment data before restart dm-worker to ensure entering increment phase
use sync_diff_inspector to check increment data
wait process worker2 exit...
process worker2 already exit
[Mon May 13 16:57:02 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
ERROR: Failed to launch dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-6srcn
io.fabric8.kubernetes.client.KubernetesClientTimeoutException: Timed out waiting for [1000000] milliseconds for [Pod] with name:[dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-6srcn] in namespace [jenkins-tiflow].
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilCondition(BaseOperation.java:939)
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:921)
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:97)
	at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:185)
	at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297)
	at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46)
	at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:829)
ERROR: Failed to launch dm-it-5282e9cf-b17e-45d0-b8a7-74faf6655098-40hw0-qz6dw
io.fabric8.kubernetes.client.KubernetesClientTimeoutException: Timed out waiting for [1000000] milliseconds for [Pod] with name:[dm-it-5282e9cf-b17e-45d0-b8a7-74faf6655098-40hw0-qz6dw] in namespace [jenkins-tiflow].
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilCondition(BaseOperation.java:939)
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:921)
	at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:97)
	at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:185)
	at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297)
	at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46)
	at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
	at java.base/java.lang.Thread.run(Thread.java:829)
check diff failed 1-th time, retry later
check diff failed 2-th time, retry later
wait process dm-master.test exit...
rpc addr 127.0.0.1:8263 is alive
wait process dm-master.test exit...
restart worker2
restart dm-worker2
check diff failed 3-th time, retry later
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
[Mon May 13 16:57:04 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process worker2 exit...
wait process dm-master.test exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
wait process worker2 exit...
process worker2 already exit
[Mon May 13 16:57:06 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
wait process dm-master.test exit...
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:57:07 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #4
[Mon May 13 16:57:07 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status"
got=2 expected=2
kill worker2
rpc addr 127.0.0.1:8263 is alive
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:57:07 CST 2024] <<<<<< test case ha_cases2 success! >>>>>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-release-7.5-pull_dm_integration_test-348/tiflow-dm already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
dmctl test cmd: "query-status test"
got=2 expected=2
[Pipeline] }
wait process dm-worker2 exit...
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
check diff successfully
check log contain failed 1-th time, retry later
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
check log contain failed 1-th time, retry later
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "query-status"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "stop-task test2"
got=1 expected=1
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
got=1 expected=1
dmctl test cmd: "query-status test"
got=1 expected=1
[Mon May 13 16:57:10 CST 2024] <<<<<< finish test_standalone_running >>>>>>
3 dm-master alive
1 dm-worker alive
0 dm-syncer alive
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
restart dm-master
wait process dm-master.test exit...
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
restart master
restart dm-master
wait process dm-master exit...
wait process dm-master.test exit...
[Mon May 13 16:57:15 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
[Mon May 13 16:57:17 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:57:19 CST 2024] <<<<<< test case ha_cases success! >>>>>>
start running case: [http_proxies] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
test dm grpc proxy env setting checking for http_proxy=http://127.0.0.1:8080
[Mon May 13 16:57:19 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
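Editor's note: both complaints above come from bash's [ builtin. At lines 15 and 21 of check_log_contains a numeric test runs, but the left operand is the literal word "proxy" rather than a match count, so [ aborts with "integer expression expected". A minimal sketch of the failure mode and a defensive variant (the variable, pattern, and log path here are illustrative, not the script's actual code):

    # Reproduction sketch: a word reaches [ where an integer was expected.
    got="proxy"                        # e.g. a pipeline that leaked the matched
                                       # word instead of a count
    if [ "$got" -ge 1 ]; then          # -> [: proxy: integer expression expected
        echo "log contains pattern"
    fi
    # Defensive form: force a pure integer before comparing.
    got=$(grep -c "proxy" dm-worker.log || true)
    [ "${got:-0}" -ge 1 ] && echo "log contains pattern"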
[Mon May 13 16:57:20 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
check log contain failed 1-th time, retry later
check diff failed 2-th time, retry later
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "query-status test"
got=1 expected=1
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
check log contain failed 1-th time, retry later
dmctl test cmd: "resume-task test"
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Mon May 13 16:57:25 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #5
[Mon May 13 16:57:25 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker.test exit...
restart worker1
restart dm-worker1
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
wait process worker1 exit...
wait process dm-worker.test exit...
wait process worker1 exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
test dm grpc proxy env setting checking for https_proxy=https://127.0.0.1:8080
[Mon May 13 16:57:28 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process worker1 exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
restart dm-master
wait process worker1 exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master exit...
rpc addr 127.0.0.1:8261 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
[Mon May 13 16:57:31 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process worker1 exit...
process worker1 already exit
[Mon May 13 16:57:31 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master exit...
process dm-master already exit
rpc addr 127.0.0.1:8262 is alive
check log contain failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
check log contain failed 1-th time, retry later
[Mon May 13 16:57:34 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
process dm-master.test already exit
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker.test exit...
check log contain failed 1-th time, retry later
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 0-th time, will retry again
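Editor's note: the "failed the N-th time, will retry again" lines suggest a poll-until-match loop around dmctl. A sketch of that shape (the --master-addr flag value, grep pattern, retry budget, and sleep interval are assumptions, not the harness's actual code):

    # Re-run query-status until the expected number of matches appears
    # or the retry budget runs out.
    expected=1
    for i in $(seq 0 9); do
        got=$(dmctl --master-addr 127.0.0.1:8261 query-status test \
                | grep -c "schema conflict")
        [ "$got" -eq "$expected" ] && break
        echo "command: query-status test ... failed the $i-th time, will retry again"
        sleep 2
    done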
wait process dm-worker.test exit...
restart master
restart dm-master
wait process dm-worker.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 1-th time, will retry again
wait process dm-master exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
test dm grpc proxy env setting checking for no_proxy=localhost,127.0.0.1
[Mon May 13 16:57:40 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master exit...
process dm-master already exit
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 2-th time, will retry again
[Mon May 13 16:57:42 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
[Mon May 13 16:57:42 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 3-th time, will retry again
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 4-th time, will retry again
wait process dm-master.test exit...
process dm-master.test already exit
restart worker1
restart dm-worker1
wait process dm-worker.test exit...
wait process worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 5-th time, will retry again
wait process dm-worker.test exit...
wait process worker1 exit...
wait process dm-worker.test exit...
wait process worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-worker.test exit...
wait process worker1 exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:57:51 CST 2024] <<<<<< test case http_proxies success! >>>>>>
start running case: [lightning_load_task] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
import prepare data
start DM master, workers and sources
[Mon May 13 16:57:51 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 7-th time, will retry again
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process worker1 exit...
rpc addr 127.0.0.1:8261 is alive
[Mon May 13 16:57:53 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process worker1 exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 8-th time, will retry again
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/lightning_load_task/source1.yaml"
[Mon May 13 16:57:55 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-worker2.toml >>>>>>
wait process worker1 exit...
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process worker1 exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/lightning_load_task/source2.yaml"
dmctl test cmd: "query-status test"
got=1 expected=1
<<<<<< test_source_and_target_with_empty_gtid success! >>>>>>
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process worker1 exit...
process worker1 already exit
[Mon May 13 16:57:56 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 9-th time, will retry again
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon May 13 16:57:57 CST 2024] <<<<<< test case all_mode success! >>>>>>
[Mon May 13 16:57:57 CST 2024] <<<<<< START DM-WORKER on port 8264, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-worker3.toml >>>>>>
wait for rpc addr 127.0.0.1:8264 alive the 1-th time
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-release-7.5-pull_dm_integration_test-348/tiflow-dm already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
rpc addr 127.0.0.1:8262 is alive
[Pipeline] // timeout
[Pipeline] }
rpc addr 127.0.0.1:8264 is alive
start DM task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-task.yaml --remove-meta"
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/lightning_load_task/conf/dm-task2.yaml --remove-meta"
{
    "result": true,
    "msg": "",
    "sources": [
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-01",
                "worker": "worker1",
                "result": null,
                "relayStatus": null
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Paused",
                    "unit": "Sync",
                    "result": {
                        "isCanceled": false,
                        "errors": [
                            {
                                "ErrCode": 42501,
                                "ErrClass": "ha",
                                "ErrScope": "internal",
                                "ErrLevel": "high",
                                "Message": "startLocation: [position: (dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 42228), gtid-set: 6311056e-1105-11ef-bf3e-5a916470e59f:1-194], endLocation: [position: (dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 42353), gtid-set: 6311056e-1105-11ef-bf3e-5a916470e59f:1-195], origin SQL: [alter table shardddl1.tb1 add column b int after a]: fail to do etcd txn operation: txn commit failed",
                                "RawCause": "rpc error: code = Unavailable desc = error reading from server: EOF",
                                "Workaround": "Please check dm-master's node status and the network between this node and dm-master"
                            }
                        ],
                        "detail": null
                    },
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "12",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 42353)",
                        "masterBinlogGtid": "6311056e-1105-11ef-bf3e-5a916470e59f:1-195",
                        "syncerBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 42163)",
                        "syncerBinlogGtid": "6311056e-1105-11ef-bf3e-5a916470e59f:1-194",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "remote",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "12",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        },
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-02",
                "worker": "worker2",
                "result": null,
                "relayStatus": {
                    "masterBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 39206)",
                    "masterBinlogGtid": "6392ae46-1105-11ef-88f2-5a916470e59f:1-167",
                    "relaySubDir": "6392ae46-1105-11ef-88f2-5a916470e59f.000001",
                    "relayBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 39206)",
                    "relayBinlogGtid": "6392ae46-1105-11ef-88f2-5a916470e59f:1-167",
                    "relayCatchUpMaster": true,
                    "stage": "Running",
                    "result": null
                }
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Running",
                    "unit": "Sync",
                    "result": null,
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "6",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin.000001, 39206)",
                        "masterBinlogGtid": "6392ae46-1105-11ef-88f2-5a916470e59f:1-167",
                        "syncerBinlog": "(dm-it-2b532da4-39e3-42ab-add0-39ea936c0b1c-t1wjw-9z2c8-bin|000001.000001, 38926)",
                        "syncerBinlogGtid": "6392ae46-1105-11ef-88f2-5a916470e59f:1-166",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "local",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "6",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        }
    ]
}
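Editor's note: in the status dump above, the subtask on mysql-replica-01 is Paused with ErrCode 42501: the shard-DDL handling of the ALTER TABLE failed an etcd txn commit, and the RawCause (gRPC "Unavailable ... EOF") is consistent with the deliberate dm-master restarts earlier in this run, which also explains the repeated "check diff failed" retries. One way to pull just the failing error out of such JSON (illustrative; assumes the output is captured as-is and jq is available):

    dmctl --master-addr 127.0.0.1:8261 query-status test \
      | jq -r '.sources[].subTaskStatus[]
               | select(.stage == "Paused")
               | .result.errors[] | "\(.ErrCode): \(.Message)"'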
PASS
coverage: 4.0% of statements in github.com/pingcap/tiflow/dm/...
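Editor's note: a "coverage: N% of statements in <pkg>" line is what a Go test binary built with coverage instrumentation prints on exit; the DM binaries in these tests are instrumented so that each run leaves the cov.*.out files archived below. Roughly how such a line is produced (a sketch, not the repo's actual build invocation):

    go test -cover -coverpkg=github.com/pingcap/tiflow/dm/... ./dm/...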
curl: (7) Failed connect to 127.0.0.1:8361; Connection refused
curl: (7) Failed connect to 127.0.0.1:8461; Connection refused
curl: (7) Failed connect to 127.0.0.1:8561; Connection refused
curl: (7) Failed connect to 127.0.0.1:8661; Connection refused
curl: (7) Failed connect to 127.0.0.1:8761; Connection refused
curl: (7) Failed connect to 127.0.0.1:8264; Connection refused
curl: (7) Failed connect to 127.0.0.1:18262; Connection refused
curl: (7) Failed connect to 127.0.0.1:18263; Connection refused
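Editor's note: these eight refusals target the remaining master ports (8361-8761) and worker ports (8264, 18262, 18263), which matches the per-port goroutine stack logs collected under /tmp/dm_test/goroutines later in this output. They read like a post-failure sweep that probes every known component port, with processes that are no longer running simply refusing the connection. Schematically (the /status endpoint and the loop itself are assumptions):

    for port in 8361 8461 8561 8661 8761 8264 18262 18263; do
        curl -s "http://127.0.0.1:${port}/status" || echo "no listener on ${port}"
    done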
make: *** [dm_integration_test_in_group] Error 1
check log contain failed 1-th time, retry later
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
Post stage
[Pipeline] sh
+ ls /tmp/dm_test
cov.shardddl1.dmctl.1715590074.673.out
cov.shardddl1.dmctl.1715590075.808.out
cov.shardddl1.dmctl.1715590080.1061.out
cov.shardddl1.dmctl.1715590081.1113.out
cov.shardddl1.dmctl.1715590104.4243.out
cov.shardddl1.dmctl.1715590108.4569.out
cov.shardddl1.dmctl.1715590109.4619.out
cov.shardddl1.dmctl.1715590140.5122.out
cov.shardddl1.dmctl.1715590144.5455.out
cov.shardddl1.dmctl.1715590145.5502.out
cov.shardddl1.dmctl.1715590162.6113.out
cov.shardddl1.dmctl.1715590166.6458.out
cov.shardddl1.dmctl.1715590168.6500.out
cov.shardddl1.dmctl.1715590168.6593.out
cov.shardddl1.dmctl.1715590169.6736.out
cov.shardddl1.dmctl.1715590170.6779.out
cov.shardddl1.dmctl.1715590172.6884.out
cov.shardddl1.dmctl.1715590173.7037.out
cov.shardddl1.dmctl.1715590174.7087.out
cov.shardddl1.dmctl.1715590175.7212.out
cov.shardddl1.dmctl.1715590175.7256.out
cov.shardddl1.dmctl.1715590175.7295.out
cov.shardddl1.dmctl.1715590175.7334.out
cov.shardddl1.dmctl.1715590176.7384.out
cov.shardddl1.dmctl.1715590184.7574.out
cov.shardddl1.dmctl.1715590186.7643.out
cov.shardddl1.dmctl.1715590186.7681.out
cov.shardddl1.dmctl.1715590186.7720.out
cov.shardddl1.dmctl.1715590187.7871.out
cov.shardddl1.dmctl.1715590188.7906.out
cov.shardddl1.dmctl.1715590190.8113.out
cov.shardddl1.dmctl.1715590191.8262.out
cov.shardddl1.dmctl.1715590192.8300.out
cov.shardddl1.dmctl.1715590193.8426.out
cov.shardddl1.dmctl.1715590193.8466.out
cov.shardddl1.dmctl.1715590193.8615.out
cov.shardddl1.dmctl.1715590194.8656.out
cov.shardddl1.dmctl.1715590195.8767.out
cov.shardddl1.dmctl.1715590198.8903.out
cov.shardddl1.dmctl.1715590198.8945.out
cov.shardddl1.dmctl.1715590198.8982.out
cov.shardddl1.dmctl.1715590203.9265.out
cov.shardddl1.dmctl.1715590204.9323.out
cov.shardddl1.dmctl.1715590205.9368.out
cov.shardddl1.dmctl.1715590209.9518.out
cov.shardddl1.dmctl.1715590210.9669.out
cov.shardddl1.dmctl.1715590211.9704.out
cov.shardddl1.dmctl.1715590216.9843.out
cov.shardddl1.dmctl.1715590251.10141.out
cov.shardddl1.dmctl.1715590251.10193.out
cov.shardddl1.dmctl.1715590252.10241.out
cov.shardddl1.dmctl.1715590254.10337.out
cov.shardddl1.dmctl.1715590255.10381.out
cov.shardddl1.dmctl.1715590264.10563.out
cov.shardddl1.dmctl.1715590264.10607.out
cov.shardddl1.dmctl.1715590264.10647.out
cov.shardddl1.dmctl.1715590266.10793.out
cov.shardddl1.dmctl.1715590267.10853.out
cov.shardddl1.dmctl.1715590269.10949.out
cov.shardddl1.dmctl.1715590269.10991.out
cov.shardddl1.dmctl.1715590276.11186.out
cov.shardddl1.dmctl.1715590276.11226.out
cov.shardddl1.dmctl.1715590276.11274.out
cov.shardddl1.dmctl.1715590277.11419.out
cov.shardddl1.dmctl.1715590278.11455.out
cov.shardddl1.dmctl.1715590287.11966.out
cov.shardddl1.dmctl.1715590288.12115.out
cov.shardddl1.dmctl.1715590289.12158.out
cov.shardddl1.dmctl.1715590295.12611.out
cov.shardddl1.dmctl.1715590295.12759.out
cov.shardddl1.dmctl.1715590296.12799.out
cov.shardddl1.dmctl.1715590299.13105.out
cov.shardddl1.dmctl.1715590299.13254.out
cov.shardddl1.dmctl.1715590301.13296.out
cov.shardddl1.dmctl.1715590304.13604.out
cov.shardddl1.dmctl.1715590307.13930.out
cov.shardddl1.dmctl.1715590308.13971.out
cov.shardddl1.dmctl.1715590311.14033.out
cov.shardddl1.dmctl.1715590311.14078.out
cov.shardddl1.dmctl.1715590314.14427.out
cov.shardddl1.dmctl.1715590315.14477.out
cov.shardddl1.dmctl.1715590316.14624.out
cov.shardddl1.dmctl.1715590319.14957.out
cov.shardddl1.dmctl.1715590322.15004.out
cov.shardddl1.dmctl.1715590322.15050.out
cov.shardddl1.dmctl.1715590322.15097.out
cov.shardddl1.dmctl.1715590323.15244.out
cov.shardddl1.dmctl.1715590325.15285.out
cov.shardddl1.dmctl.1715590325.15369.out
cov.shardddl1.dmctl.1715590326.15514.out
cov.shardddl1.dmctl.1715590327.15556.out
cov.shardddl1.dmctl.1715590334.15750.out
cov.shardddl1.dmctl.1715590335.15895.out
cov.shardddl1.dmctl.1715590337.15946.out
cov.shardddl1.dmctl.1715590339.16048.out
cov.shardddl1.dmctl.1715590340.16196.out
cov.shardddl1.dmctl.1715590342.16233.out
cov.shardddl1.dmctl.1715590342.16325.out
cov.shardddl1.master.out
cov.shardddl1.worker.8262.1715590073.out
cov.shardddl1.worker.8262.1715590079.out
cov.shardddl1.worker.8262.1715590107.out
cov.shardddl1.worker.8262.1715590142.out
cov.shardddl1.worker.8262.1715590164.out
cov.shardddl1.worker.8262.1715590306.out
cov.shardddl1.worker.8262.1715590313.out
cov.shardddl1.worker.8262.1715590318.out
cov.shardddl1.worker.8263.1715590074.out
cov.shardddl1.worker.8263.1715590079.out
cov.shardddl1.worker.8263.1715590107.out
cov.shardddl1.worker.8263.1715590142.out
cov.shardddl1.worker.8263.1715590164.out
cov.shardddl1.worker.8263.1715590306.out
cov.shardddl1.worker.8263.1715590313.out
cov.shardddl1.worker.8263.1715590318.out
cov.shardddl1_1.dmctl.1715590350.16789.out
cov.shardddl1_1.dmctl.1715590352.16929.out
cov.shardddl1_1.dmctl.1715590353.17021.out
cov.shardddl1_1.dmctl.1715590354.17073.out
cov.shardddl1_1.dmctl.1715590357.17422.out
cov.shardddl1_1.dmctl.1715590358.17569.out
cov.shardddl1_1.dmctl.1715590359.17620.out
cov.shardddl1_1.dmctl.1715590361.17738.out
cov.shardddl1_1.dmctl.1715590363.17888.out
cov.shardddl1_1.dmctl.1715590364.17940.out
cov.shardddl1_1.dmctl.1715590366.18067.out
cov.shardddl1_1.dmctl.1715590367.18214.out
cov.shardddl1_1.dmctl.1715590369.18264.out
cov.shardddl1_1.dmctl.1715590371.18397.out
cov.shardddl1_1.dmctl.1715590372.18545.out
cov.shardddl1_1.dmctl.1715590374.18587.out
cov.shardddl1_1.dmctl.1715590376.18708.out
cov.shardddl1_1.dmctl.1715590377.18855.out
cov.shardddl1_1.dmctl.1715590379.18887.out
cov.shardddl1_1.dmctl.1715590381.19022.out
cov.shardddl1_1.dmctl.1715590383.19167.out
cov.shardddl1_1.dmctl.1715590384.19206.out
cov.shardddl1_1.dmctl.1715590384.19283.out
cov.shardddl1_1.dmctl.1715590386.19434.out
cov.shardddl1_1.dmctl.1715590387.19481.out
cov.shardddl1_1.dmctl.1715590387.19550.out
cov.shardddl1_1.dmctl.1715590388.19698.out
cov.shardddl1_1.dmctl.1715590390.19739.out
cov.shardddl1_1.dmctl.1715590390.19843.out
cov.shardddl1_1.dmctl.1715590391.19991.out
cov.shardddl1_1.dmctl.1715590393.20032.out
cov.shardddl1_1.dmctl.1715590395.20120.out
cov.shardddl1_1.dmctl.1715590396.20257.out
cov.shardddl1_1.dmctl.1715590397.20298.out
cov.shardddl1_1.dmctl.1715590400.20360.out
cov.shardddl1_1.dmctl.1715590401.20499.out
cov.shardddl1_1.dmctl.1715590402.20534.out
cov.shardddl1_1.dmctl.1715590404.20591.out
cov.shardddl1_1.dmctl.1715590406.20734.out
cov.shardddl1_1.dmctl.1715590407.20774.out
cov.shardddl1_1.dmctl.1715590409.20839.out
cov.shardddl1_1.dmctl.1715590411.20978.out
cov.shardddl1_1.dmctl.1715590412.21014.out
cov.shardddl1_1.dmctl.1715590414.21075.out
cov.shardddl1_1.dmctl.1715590415.21219.out
cov.shardddl1_1.dmctl.1715590417.21251.out
cov.shardddl1_1.dmctl.1715590417.21297.out
cov.shardddl1_1.dmctl.1715590417.21438.out
cov.shardddl1_1.dmctl.1715590418.21478.out
cov.shardddl1_1.dmctl.1715590419.21525.out
cov.shardddl1_1.dmctl.1715590420.21664.out
cov.shardddl1_1.dmctl.1715590421.21706.out
cov.shardddl1_1.dmctl.1715590423.21773.out
cov.shardddl1_1.dmctl.1715590425.21920.out
cov.shardddl1_1.dmctl.1715590426.21961.out
cov.shardddl1_1.dmctl.1715590428.22013.out
cov.shardddl1_1.dmctl.1715590429.22152.out
cov.shardddl1_1.dmctl.1715590431.22188.out
cov.shardddl1_1.dmctl.1715590433.22250.out
cov.shardddl1_1.dmctl.1715590434.22401.out
cov.shardddl1_1.dmctl.1715590435.22438.out
cov.shardddl1_1.dmctl.1715590436.22475.out
cov.shardddl1_1.dmctl.1715590436.22516.out
cov.shardddl1_1.dmctl.1715590437.22664.out
cov.shardddl1_1.dmctl.1715590438.22704.out
cov.shardddl1_1.dmctl.1715590439.22797.out
cov.shardddl1_1.dmctl.1715590441.22950.out
cov.shardddl1_1.dmctl.1715590443.22987.out
cov.shardddl1_1.dmctl.1715590443.23054.out
cov.shardddl1_1.dmctl.1715590443.23095.out
cov.shardddl1_1.dmctl.1715590444.23239.out
cov.shardddl1_1.dmctl.1715590446.23276.out
cov.shardddl1_1.dmctl.1715590446.23318.out
cov.shardddl1_1.dmctl.1715590446.23359.out
cov.shardddl1_1.dmctl.1715590447.23507.out
cov.shardddl1_1.dmctl.1715590448.23538.out
cov.shardddl1_1.dmctl.1715590451.23677.out
cov.shardddl1_1.dmctl.1715590451.23820.out
cov.shardddl1_1.dmctl.1715590452.23861.out
cov.shardddl1_1.dmctl.1715590453.23960.out
cov.shardddl1_1.dmctl.1715590454.24110.out
cov.shardddl1_1.dmctl.1715590455.24149.out
cov.shardddl1_1.dmctl.1715590455.24220.out
cov.shardddl1_1.dmctl.1715590456.24364.out
cov.shardddl1_1.dmctl.1715590457.24402.out
cov.shardddl1_1.dmctl.1715590457.24467.out
cov.shardddl1_1.dmctl.1715590457.24509.out
cov.shardddl1_1.dmctl.1715590459.24661.out
cov.shardddl1_1.dmctl.1715590460.24697.out
cov.shardddl1_1.dmctl.1715590460.24831.out
cov.shardddl1_1.dmctl.1715590462.24988.out
cov.shardddl1_1.dmctl.1715590463.25028.out
cov.shardddl1_1.dmctl.1715590465.25178.out
cov.shardddl1_1.dmctl.1715590467.25331.out
cov.shardddl1_1.dmctl.1715590468.25370.out
cov.shardddl1_1.dmctl.1715590468.25439.out
cov.shardddl1_1.dmctl.1715590468.25479.out
cov.shardddl1_1.dmctl.1715590469.25629.out
cov.shardddl1_1.dmctl.1715590470.25664.out
cov.shardddl1_1.dmctl.1715590470.25736.out
cov.shardddl1_1.dmctl.1715590470.25769.out
cov.shardddl1_1.dmctl.1715590472.25924.out
cov.shardddl1_1.dmctl.1715590473.25962.out
cov.shardddl1_1.dmctl.1715590475.26076.out
cov.shardddl1_1.master.out
cov.shardddl1_1.worker.8262.1715590349.out
cov.shardddl1_1.worker.8263.1715590351.out
cov.shardddl2.dmctl.1715590484.26539.out
cov.shardddl2.dmctl.1715590485.26675.out
cov.shardddl2.dmctl.1715590486.26769.out
cov.shardddl2.dmctl.1715590488.26820.out
cov.shardddl2.dmctl.1715590494.27027.out
cov.shardddl2.dmctl.1715590501.27234.out
cov.shardddl2.dmctl.1715590504.27412.out
cov.shardddl2.dmctl.1715590507.27528.out
cov.shardddl2.dmctl.1715590507.27677.out
cov.shardddl2.dmctl.1715590508.27732.out
cov.shardddl2.dmctl.1715590517.28001.out
cov.shardddl2.dmctl.1715590518.28082.out
cov.shardddl2.dmctl.1715590518.28229.out
cov.shardddl2.dmctl.1715590519.28270.out
cov.shardddl2.dmctl.1715590525.28468.out
cov.shardddl2.dmctl.1715590531.28660.out
cov.shardddl2.dmctl.1715590534.28832.out
cov.shardddl2.dmctl.1715590539.28969.out
cov.shardddl2.dmctl.1715590539.29116.out
cov.shardddl2.dmctl.1715590540.29170.out
cov.shardddl2.dmctl.1715590561.29703.out
cov.shardddl2.dmctl.1715590561.29742.out
cov.shardddl2.dmctl.1715590562.29892.out
cov.shardddl2.dmctl.1715590563.29944.out
cov.shardddl2.dmctl.1715590565.30063.out
cov.shardddl2.dmctl.1715590565.30116.out
cov.shardddl2.dmctl.1715590566.30165.out
cov.shardddl2.dmctl.1715590572.30322.out
cov.shardddl2.dmctl.1715590572.30370.out
cov.shardddl2.dmctl.1715590572.30409.out
cov.shardddl2.dmctl.1715590572.30511.out
cov.shardddl2.dmctl.1715590573.30650.out
cov.shardddl2.dmctl.1715590574.30708.out
cov.shardddl2.dmctl.1715590584.30970.out
cov.shardddl2.dmctl.1715590584.31032.out
cov.shardddl2.dmctl.1715590584.31079.out
cov.shardddl2.dmctl.1715590590.31257.out
cov.shardddl2.dmctl.1715590590.31303.out
cov.shardddl2.dmctl.1715590591.31345.out
cov.shardddl2.dmctl.1715590591.31445.out
cov.shardddl2.dmctl.1715590591.31595.out
cov.shardddl2.dmctl.1715590592.31641.out
cov.shardddl2.dmctl.1715590600.31899.out
cov.shardddl2.dmctl.1715590600.31958.out
cov.shardddl2.dmctl.1715590600.32007.out
cov.shardddl2.dmctl.1715590607.32162.out
cov.shardddl2.dmctl.1715590607.32204.out
cov.shardddl2.dmctl.1715590607.32244.out
cov.shardddl2.dmctl.1715590607.32347.out
cov.shardddl2.dmctl.1715590608.32494.out
cov.shardddl2.dmctl.1715590609.32529.out
cov.shardddl2.dmctl.1715590611.32644.out
cov.shardddl2.dmctl.1715590618.32858.out
cov.shardddl2.dmctl.1715590619.32906.out
cov.shardddl2.dmctl.1715590626.33062.out
cov.shardddl2.dmctl.1715590626.33105.out
cov.shardddl2.dmctl.1715590626.33143.out
cov.shardddl2.dmctl.1715590626.33239.out
cov.shardddl2.dmctl.1715590627.33382.out
cov.shardddl2.dmctl.1715590628.33426.out
cov.shardddl2.dmctl.1715590630.33542.out
cov.shardddl2.dmctl.1715590638.33756.out
cov.shardddl2.dmctl.1715590638.33802.out
cov.shardddl2.dmctl.1715590644.33956.out
cov.shardddl2.dmctl.1715590644.34003.out
cov.shardddl2.dmctl.1715590644.34045.out
cov.shardddl2.dmctl.1715590645.34151.out
cov.shardddl2.dmctl.1715590645.34295.out
cov.shardddl2.dmctl.1715590646.34334.out
cov.shardddl2.dmctl.1715590649.34451.out
cov.shardddl2.dmctl.1715590649.34510.out
cov.shardddl2.dmctl.1715590657.34700.out
cov.shardddl2.master.out
cov.shardddl2.worker.8262.1715590483.out
cov.shardddl2.worker.8262.1715590490.out
cov.shardddl2.worker.8263.1715590484.out
cov.shardddl2.worker.8263.1715590521.out
downstream
goroutines
shardddl1
shardddl1_1
shardddl2
sql_res.shardddl1.txt
sql_res.shardddl1_1.txt
sql_res.shardddl2.txt
tidb.toml
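Editor's note: the coverage artifacts above follow two shapes, cov.<case>.dmctl.<unix_ts>.<pid>.out for dmctl invocations and cov.<case>.worker.<port>.<unix_ts>.out for workers (this is a reading of the names, not documented here). The embedded timestamps decode to the wall-clock times seen in this log:

    f=cov.shardddl2.dmctl.1715590657.34700.out
    ts=$(echo "$f" | cut -d. -f4)
    date -u -d "@$ts"        # Mon May 13 08:57:37 UTC 2024, i.e. 16:57:37 CST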
++ find /tmp/dm_test/ -type f -name '*.log'
+ tar -cvzf log-G07.tar.gz /tmp/dm_test/shardddl1/dmctl.1715590339.log /tmp/dm_test/shardddl1/dmctl.1715590198.log /tmp/dm_test/shardddl1/dmctl.1715590204.log /tmp/dm_test/shardddl1/dmctl.1715590109.log /tmp/dm_test/shardddl1/dmctl.1715590168.log /tmp/dm_test/shardddl1/dmctl.1715590287.log /tmp/dm_test/shardddl1/dmctl.1715590326.log /tmp/dm_test/shardddl1/dmctl.1715590074.log /tmp/dm_test/shardddl1/dmctl.1715590190.log /tmp/dm_test/shardddl1/dmctl.1715590340.log /tmp/dm_test/shardddl1/dmctl.1715590193.log /tmp/dm_test/shardddl1/dmctl.1715590162.log /tmp/dm_test/shardddl1/dmctl.1715590192.log /tmp/dm_test/shardddl1/dmctl.1715590276.log /tmp/dm_test/shardddl1/dmctl.1715590194.log /tmp/dm_test/shardddl1/dmctl.1715590255.log /tmp/dm_test/shardddl1/dmctl.1715590188.log /tmp/dm_test/shardddl1/dmctl.1715590184.log /tmp/dm_test/shardddl1/worker1/log/stdout.log /tmp/dm_test/shardddl1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715590211.log /tmp/dm_test/shardddl1/dmctl.1715590296.log /tmp/dm_test/shardddl1/dmctl.1715590304.log /tmp/dm_test/shardddl1/dmctl.1715590267.log /tmp/dm_test/shardddl1/dmctl.1715590269.log /tmp/dm_test/shardddl1/dmctl.1715590176.log /tmp/dm_test/shardddl1/dmctl.1715590174.log /tmp/dm_test/shardddl1/dmctl.1715590301.log /tmp/dm_test/shardddl1/dmctl.1715590289.log /tmp/dm_test/shardddl1/dmctl.1715590191.log /tmp/dm_test/shardddl1/dmctl.1715590169.log /tmp/dm_test/shardddl1/dmctl.1715590266.log /tmp/dm_test/shardddl1/dmctl.1715590327.log /tmp/dm_test/shardddl1/dmctl.1715590288.log /tmp/dm_test/shardddl1/dmctl.1715590166.log /tmp/dm_test/shardddl1/dmctl.1715590209.log /tmp/dm_test/shardddl1/dmctl.1715590295.log /tmp/dm_test/shardddl1/dmctl.1715590335.log /tmp/dm_test/shardddl1/dmctl.1715590175.log /tmp/dm_test/shardddl1/dmctl.1715590325.log /tmp/dm_test/shardddl1/dmctl.1715590186.log /tmp/dm_test/shardddl1/dmctl.1715590277.log /tmp/dm_test/shardddl1/dmctl.1715590308.log /tmp/dm_test/shardddl1/dmctl.1715590311.log /tmp/dm_test/shardddl1/dmctl.1715590173.log /tmp/dm_test/shardddl1/dmctl.1715590278.log /tmp/dm_test/shardddl1/dmctl.1715590323.log /tmp/dm_test/shardddl1/dmctl.1715590172.log /tmp/dm_test/shardddl1/dmctl.1715590080.log /tmp/dm_test/shardddl1/dmctl.1715590140.log /tmp/dm_test/shardddl1/dmctl.1715590075.log /tmp/dm_test/shardddl1/dmctl.1715590337.log /tmp/dm_test/shardddl1/dmctl.1715590216.log /tmp/dm_test/shardddl1/dmctl.1715590254.log /tmp/dm_test/shardddl1/dmctl.1715590187.log /tmp/dm_test/shardddl1/dmctl.1715590334.log /tmp/dm_test/shardddl1/dmctl.1715590195.log /tmp/dm_test/shardddl1/dmctl.1715590108.log /tmp/dm_test/shardddl1/dmctl.1715590203.log /tmp/dm_test/shardddl1/dmctl.1715590210.log /tmp/dm_test/shardddl1/worker2/log/stdout.log /tmp/dm_test/shardddl1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715590316.log /tmp/dm_test/shardddl1/dmctl.1715590342.log /tmp/dm_test/shardddl1/sync_diff_stdout.log /tmp/dm_test/shardddl1/dmctl.1715590251.log /tmp/dm_test/shardddl1/dmctl.1715590205.log /tmp/dm_test/shardddl1/dmctl.1715590081.log /tmp/dm_test/shardddl1/dmctl.1715590322.log /tmp/dm_test/shardddl1/dmctl.1715590307.log /tmp/dm_test/shardddl1/dmctl.1715590170.log /tmp/dm_test/shardddl1/dmctl.1715590264.log /tmp/dm_test/shardddl1/dmctl.1715590144.log /tmp/dm_test/shardddl1/dmctl.1715590299.log /tmp/dm_test/shardddl1/dmctl.1715590252.log /tmp/dm_test/shardddl1/dmctl.1715590319.log /tmp/dm_test/shardddl1/dmctl.1715590314.log /tmp/dm_test/shardddl1/dmctl.1715590104.log /tmp/dm_test/shardddl1/dmctl.1715590145.log 
/tmp/dm_test/shardddl1/dmctl.1715590315.log /tmp/dm_test/shardddl1/master/log/stdout.log /tmp/dm_test/shardddl1/master/log/dm-master.log /tmp/dm_test/shardddl1_1/dmctl.1715590404.log /tmp/dm_test/shardddl1_1/dmctl.1715590374.log /tmp/dm_test/shardddl1_1/dmctl.1715590376.log /tmp/dm_test/shardddl1_1/dmctl.1715590467.log /tmp/dm_test/shardddl1_1/dmctl.1715590421.log /tmp/dm_test/shardddl1_1/dmctl.1715590372.log /tmp/dm_test/shardddl1_1/dmctl.1715590423.log /tmp/dm_test/shardddl1_1/dmctl.1715590420.log /tmp/dm_test/shardddl1_1/dmctl.1715590452.log /tmp/dm_test/shardddl1_1/dmctl.1715590409.log /tmp/dm_test/shardddl1_1/dmctl.1715590428.log /tmp/dm_test/shardddl1_1/dmctl.1715590417.log /tmp/dm_test/shardddl1_1/dmctl.1715590401.log /tmp/dm_test/shardddl1_1/dmctl.1715590473.log /tmp/dm_test/shardddl1_1/dmctl.1715590475.log /tmp/dm_test/shardddl1_1/dmctl.1715590463.log /tmp/dm_test/shardddl1_1/dmctl.1715590407.log /tmp/dm_test/shardddl1_1/dmctl.1715590354.log /tmp/dm_test/shardddl1_1/worker1/log/stdout.log /tmp/dm_test/shardddl1_1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715590419.log /tmp/dm_test/shardddl1_1/dmctl.1715590468.log /tmp/dm_test/shardddl1_1/dmctl.1715590453.log /tmp/dm_test/shardddl1_1/dmctl.1715590357.log /tmp/dm_test/shardddl1_1/dmctl.1715590371.log /tmp/dm_test/shardddl1_1/dmctl.1715590447.log /tmp/dm_test/shardddl1_1/dmctl.1715590383.log /tmp/dm_test/shardddl1_1/dmctl.1715590451.log /tmp/dm_test/shardddl1_1/dmctl.1715590456.log /tmp/dm_test/shardddl1_1/dmctl.1715590438.log /tmp/dm_test/shardddl1_1/dmctl.1715590388.log /tmp/dm_test/shardddl1_1/dmctl.1715590415.log /tmp/dm_test/shardddl1_1/dmctl.1715590369.log /tmp/dm_test/shardddl1_1/dmctl.1715590465.log /tmp/dm_test/shardddl1_1/dmctl.1715590350.log /tmp/dm_test/shardddl1_1/dmctl.1715590462.log /tmp/dm_test/shardddl1_1/dmctl.1715590386.log /tmp/dm_test/shardddl1_1/dmctl.1715590400.log /tmp/dm_test/shardddl1_1/dmctl.1715590444.log /tmp/dm_test/shardddl1_1/dmctl.1715590384.log /tmp/dm_test/shardddl1_1/dmctl.1715590390.log /tmp/dm_test/shardddl1_1/dmctl.1715590381.log /tmp/dm_test/shardddl1_1/dmctl.1715590439.log /tmp/dm_test/shardddl1_1/dmctl.1715590437.log /tmp/dm_test/shardddl1_1/dmctl.1715590391.log /tmp/dm_test/shardddl1_1/dmctl.1715590367.log /tmp/dm_test/shardddl1_1/dmctl.1715590395.log /tmp/dm_test/shardddl1_1/dmctl.1715590393.log /tmp/dm_test/shardddl1_1/dmctl.1715590426.log /tmp/dm_test/shardddl1_1/dmctl.1715590353.log /tmp/dm_test/shardddl1_1/dmctl.1715590361.log /tmp/dm_test/shardddl1_1/dmctl.1715590387.log /tmp/dm_test/shardddl1_1/dmctl.1715590448.log /tmp/dm_test/shardddl1_1/dmctl.1715590457.log /tmp/dm_test/shardddl1_1/dmctl.1715590433.log /tmp/dm_test/shardddl1_1/dmctl.1715590414.log /tmp/dm_test/shardddl1_1/dmctl.1715590363.log /tmp/dm_test/shardddl1_1/dmctl.1715590366.log /tmp/dm_test/shardddl1_1/dmctl.1715590402.log /tmp/dm_test/shardddl1_1/dmctl.1715590441.log /tmp/dm_test/shardddl1_1/dmctl.1715590455.log /tmp/dm_test/shardddl1_1/dmctl.1715590352.log /tmp/dm_test/shardddl1_1/dmctl.1715590436.log /tmp/dm_test/shardddl1_1/dmctl.1715590411.log /tmp/dm_test/shardddl1_1/dmctl.1715590418.log /tmp/dm_test/shardddl1_1/dmctl.1715590443.log /tmp/dm_test/shardddl1_1/dmctl.1715590429.log /tmp/dm_test/shardddl1_1/dmctl.1715590397.log /tmp/dm_test/shardddl1_1/dmctl.1715590412.log /tmp/dm_test/shardddl1_1/dmctl.1715590446.log /tmp/dm_test/shardddl1_1/dmctl.1715590364.log /tmp/dm_test/shardddl1_1/worker2/log/stdout.log /tmp/dm_test/shardddl1_1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715590379.log 
/tmp/dm_test/shardddl1_1/dmctl.1715590359.log /tmp/dm_test/shardddl1_1/dmctl.1715590434.log /tmp/dm_test/shardddl1_1/dmctl.1715590431.log /tmp/dm_test/shardddl1_1/sync_diff_stdout.log /tmp/dm_test/shardddl1_1/dmctl.1715590472.log /tmp/dm_test/shardddl1_1/dmctl.1715590470.log /tmp/dm_test/shardddl1_1/dmctl.1715590460.log /tmp/dm_test/shardddl1_1/dmctl.1715590435.log /tmp/dm_test/shardddl1_1/dmctl.1715590406.log /tmp/dm_test/shardddl1_1/dmctl.1715590469.log /tmp/dm_test/shardddl1_1/dmctl.1715590358.log /tmp/dm_test/shardddl1_1/dmctl.1715590396.log /tmp/dm_test/shardddl1_1/dmctl.1715590459.log /tmp/dm_test/shardddl1_1/dmctl.1715590425.log /tmp/dm_test/shardddl1_1/dmctl.1715590377.log /tmp/dm_test/shardddl1_1/dmctl.1715590454.log /tmp/dm_test/shardddl1_1/master/log/stdout.log /tmp/dm_test/shardddl1_1/master/log/dm-master.log /tmp/dm_test/shardddl2/dmctl.1715590644.log /tmp/dm_test/shardddl2/dmctl.1715590608.log /tmp/dm_test/shardddl2/dmctl.1715590484.log /tmp/dm_test/shardddl2/dmctl.1715590485.log /tmp/dm_test/shardddl2/dmctl.1715590592.log /tmp/dm_test/shardddl2/dmctl.1715590539.log /tmp/dm_test/shardddl2/dmctl.1715590517.log /tmp/dm_test/shardddl2/dmctl.1715590627.log /tmp/dm_test/shardddl2/worker1/log/stdout.log /tmp/dm_test/shardddl2/worker1/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715590591.log /tmp/dm_test/shardddl2/dmctl.1715590657.log /tmp/dm_test/shardddl2/dmctl.1715590611.log /tmp/dm_test/shardddl2/dmctl.1715590573.log /tmp/dm_test/shardddl2/dmctl.1715590563.log /tmp/dm_test/shardddl2/dmctl.1715590590.log /tmp/dm_test/shardddl2/dmctl.1715590531.log /tmp/dm_test/shardddl2/dmctl.1715590504.log /tmp/dm_test/shardddl2/dmctl.1715590638.log /tmp/dm_test/shardddl2/dmctl.1715590609.log /tmp/dm_test/shardddl2/dmctl.1715590566.log /tmp/dm_test/shardddl2/dmctl.1715590649.log /tmp/dm_test/shardddl2/dmctl.1715590628.log /tmp/dm_test/shardddl2/dmctl.1715590507.log /tmp/dm_test/shardddl2/dmctl.1715590501.log /tmp/dm_test/shardddl2/dmctl.1715590619.log /tmp/dm_test/shardddl2/dmctl.1715590607.log /tmp/dm_test/shardddl2/dmctl.1715590584.log /tmp/dm_test/shardddl2/dmctl.1715590494.log /tmp/dm_test/shardddl2/dmctl.1715590488.log /tmp/dm_test/shardddl2/dmctl.1715590508.log /tmp/dm_test/shardddl2/dmctl.1715590525.log /tmp/dm_test/shardddl2/dmctl.1715590630.log /tmp/dm_test/shardddl2/dmctl.1715590572.log /tmp/dm_test/shardddl2/dmctl.1715590519.log /tmp/dm_test/shardddl2/dmctl.1715590645.log /tmp/dm_test/shardddl2/dmctl.1715590540.log /tmp/dm_test/shardddl2/dmctl.1715590626.log /tmp/dm_test/shardddl2/dmctl.1715590562.log /tmp/dm_test/shardddl2/dmctl.1715590518.log /tmp/dm_test/shardddl2/worker2/log/stdout.log /tmp/dm_test/shardddl2/worker2/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715590565.log /tmp/dm_test/shardddl2/dmctl.1715590646.log /tmp/dm_test/shardddl2/sync_diff_stdout.log /tmp/dm_test/shardddl2/dmctl.1715590574.log /tmp/dm_test/shardddl2/dmctl.1715590534.log /tmp/dm_test/shardddl2/dmctl.1715590561.log /tmp/dm_test/shardddl2/dmctl.1715590600.log /tmp/dm_test/shardddl2/dmctl.1715590486.log /tmp/dm_test/shardddl2/dmctl.1715590618.log /tmp/dm_test/shardddl2/master/log/stdout.log /tmp/dm_test/shardddl2/master/log/dm-master.log /tmp/dm_test/downstream/tidb/log/tidb.log /tmp/dm_test/goroutines/stack/log/worker-8264.log /tmp/dm_test/goroutines/stack/log/master-8561.log /tmp/dm_test/goroutines/stack/log/worker-18262.log /tmp/dm_test/goroutines/stack/log/master-8461.log /tmp/dm_test/goroutines/stack/log/master-8761.log /tmp/dm_test/goroutines/stack/log/worker-8262.log 
/tmp/dm_test/goroutines/stack/log/master-8261.log /tmp/dm_test/goroutines/stack/log/worker-8263.log /tmp/dm_test/goroutines/stack/log/worker-18263.log /tmp/dm_test/goroutines/stack/log/master-8661.log /tmp/dm_test/goroutines/stack/log/master-8361.log
tar: Removing leading `/' from member names
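Editor's note: that warning is GNU tar's default behavior, stripping the leading '/' so the archive extracts under a relative path (passing -P would keep absolute member names). An equivalent collection step to the find-plus-tar pair above (a sketch, not the CI script itself):

    find /tmp/dm_test/ -type f -name '*.log' | tar -czf log-G07.tar.gz -T -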
+ ls -alh log-G07.tar.gz
-rw-r--r-- 1 jenkins jenkins 668K May 13 16:58 log-G07.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
[Pipeline] }
[Pipeline] // withCredentials
dmctl test cmd: "query-status load_task1"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "query-status load_task2"
got=1 expected=1
got=1 expected=1
test worker restart
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
Sending interrupt signal to process
Killing processes
wait process dm-worker1 exit...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
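Editor's note: exit code 143 is the shell's encoding of termination by SIGTERM (128 + 15), matching the "Sending interrupt signal to process" lines above; Jenkins aborts the remaining parallel branches once one matrix cell fails. A quick demonstration:

    sleep 100 & pid=$!
    kill -TERM "$pid"
    wait "$pid"; echo "$?"    # prints 143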
kill finished with exit code 0
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 2
[Pipeline] // dir
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
[Pipeline] // cache
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // podTemplate
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 2
Finished: FAILURE