Skipping 1,148 KB..
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl4_1/source1.yaml"
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8262 is alive
[Tue May  7 10:42:27 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
restart dm-master
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:42:28 CST 2024] <<<<<< test case checkpoint_transaction success! >>>>>>
start running case: [check_task] script: [/home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:42:28 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   341  100   216  100   125  66055  38226 --:--:-- --:--:-- --:--:-- 72000
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_optimistic/source1.yaml"
got=1 expected=1
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 1\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
wait process dm-master exit...
wait process dm-master.test exit...
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_optimistic/source2.yaml"
[Tue May  7 10:42:28 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master exit...
process dm-master already exit
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   407  100   282  100   125  67935  30113 --:--:-- --:--:-- --:--:-- 70500
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-task.yaml --remove-meta"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl4_1/source2.yaml"
rpc addr 127.0.0.1:8261 is alive
[Tue May  7 10:42:30 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=1 expected=1
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 1\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff successfully
ERROR 1146 (42S02) at line 1: Table 'sharding_seq_tmp.t1' doesn't exist
run tidb sql failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/source1.yaml"
[Tue May  7 10:42:31 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Tue May  7 10:42:31 CST 2024] <<<<<< start DM-TABLE_CHECKPOINT_BACKWARD optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "check-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-noshard.yaml"
dmctl test cmd: "check-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-sharding.yaml"
wait process dm-master.test exit...
--> start test_privileges_can_migrate...
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-priv.yaml --remove-meta"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "query-status test"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
got=2 expected=2
wait process dm-master.test exit...
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   398  100   273  100   125  67424  30871 --:--:-- --:--:-- --:--:-- 91000
dmctl test cmd: "pause-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=2 expected=2
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   512  100   387  100   125  93772  30288 --:--:-- --:--:-- --:--:-- 96750
dmctl test cmd: "resume-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
dmctl test cmd: "query-status test"
got=3 expected=3
restart dm-worker1
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
wait process dm-worker1 exit...
check diff successfully
dmctl test cmd: "query-status test"
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8261 is alive
check diff successfully
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
clean source table
pass test_privileges_can_migrate
--> start test_privilege_precheck...
dmctl test cmd: "check-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-priv.yaml"
[Tue May  7 10:42:35 CST 2024] <<<<<< finish DM-RECOVER_LOCK optimistic >>>>>>
run DM_DropAddColumn case #0
[Tue May  7 10:42:35 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "check-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-priv.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-worker1 exit...
process dm-worker1 already exit
[Tue May  7 10:42:35 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "check-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/check_task/conf/task-priv.yaml"
pass test_privilege_precheck
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
dmctl test cmd: "resume-task test"
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "pause-task test"
got=1 expected=1
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "shard-ddl-lock unlock non-exist-task-`test_db`.`test_table`"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
import prepare data
start DM worker and master standalone cluster
[Tue May  7 10:42:37 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master1.toml >>>>>>
[Tue May  7 10:42:37 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master2.toml >>>>>>
[Tue May  7 10:42:37 CST 2024] <<<<<< START DM-MASTER on port 8461, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-master3.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 0-th time, will retry again
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
got=3 expected=3
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
check diff failed 1-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 1-th time, will retry again
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_dm_integration_test-1920/tiflow-dm already exists)
check diff successfully
dmctl test cmd: "pause-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=2 expected=2
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   508  100   383  100   125  78499  25620 --:--:-- --:--:-- --:--:-- 95750
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   291  100   166  100   125  45084  33948 --:--:-- --:--:-- --:--:-- 55333
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   538  100   370  100   168  65812  29882 --:--:-- --:--:-- --:--:-- 74000
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   508  100   383  100   125   101k  34013 --:--:-- --:--:-- --:--:--  124k
dmctl test cmd: "binlog-schema list -s mysql-replica-01,mysql-replica-02 sequence_sharding_optimistic sharding_seq_opt t2"
dmctl test cmd: "binlog-schema delete -s mysql-replica-01 sequence_sharding_optimistic sharding_seq_opt t2"
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
dmctl test cmd: "binlog-schema update -s mysql-replica-01 sequence_sharding_optimistic sharding_seq_opt t1 /tmp/dm_test/sequence_sharding_optimistic/schema.sql"
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100   508  100   383  100   125  93528  30525 --:--:-- --:--:-- --:--:--  124k
{
  "result": true,
  "msg": "",
  "sources": [
    {
      "result": true,
      "msg": "CREATE TABLE `t1` ( `id` bigint(20) NOT NULL, `c2` varchar(20) DEFAULT NULL, `c3` bigint(11) DEFAULT NULL, PRIMARY KEY (`id`) /*T![clustered_index] CLUSTERED */) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin",
      "source": "mysql-replica-01",
      "worker": "worker1"
    }
  ]
}
dmctl test cmd: "resume-task sequence_sharding_optimistic"
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
check diff failed 2-th time, retry later
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=3 expected=3
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
[Pipeline] // podTemplate
[Pipeline] }
rpc addr 127.0.0.1:8261 is alive
rpc addr 127.0.0.1:8361 is alive
rpc addr 127.0.0.1:8461 is alive
[Tue May  7 10:42:42 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
operate mysql config to worker
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source1.yaml"
wait process dm-master.test exit...
process dm-master.test already exit
check diff failed 3-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
start DM task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task.yaml "
wait process dm-worker.test exit...
use sync_diff_inspector to check full dump loader
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
check diff successfully
flush logs to force rotate binlog file
apply increment data before restart dm-worker to ensure entering increment phase
use sync_diff_inspector to check increment data
wait process dm-worker.test exit...
dmctl test cmd: "resume-task test"
check diff failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
[Tue May  7 10:42:46 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #1
[Tue May  7 10:42:46 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:42:47 CST 2024] <<<<<< test case sequence_sharding_optimistic success! >>>>>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_dm_integration_test-1920/tiflow-dm already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
dmctl test cmd: "query-status test"
got=2 expected=2
[Pipeline] // node
check diff successfully
dmctl test cmd: "operate-source create /tmp/dm_test/ha_cases/source2.yaml"
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
check diff successfully
restart dm-master
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
[Tue May  7 10:42:48 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/0" count: 0 != expected: 1, failed the 5-th time, will retry again
wait process dm-master exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
wait process dm-master exit...
process dm-master already exit
got=1 expected=1
got=1 expected=1
dmctl test cmd: "query-status"
got=2 expected=2
kill worker2
wait process dm-worker2 exit...
dmctl test cmd: "resume-task test"
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "pause-task test"
[Tue May  7 10:42:52 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-worker2 exit...
process dm-worker2 already exit
dmctl test cmd: "query-status"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "stop-task test2"
got=1 expected=1
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/ha_cases/conf/standalone-task2.yaml"
got=1 expected=1
dmctl test cmd: "query-status test"
got=1 expected=1
[Tue May  7 10:42:53 CST 2024] <<<<<< finish test_standalone_running >>>>>>
3 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 0-th time, will retry again
wait process dm-master.test exit...
rpc addr 127.0.0.1:8261 is alive
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 1-th time, will retry again
dmctl test cmd: "shard-ddl-lock"
wait process dm-master.test exit...
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff failed 1-th time, retry later
check diff failed 2-th time, retry later
wait process dm-master.test exit...
check diff failed 2-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
check diff failed 3-th time, retry later
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 5-th time, will retry again
check diff successfully
dmctl test cmd: "stop-task test"
[Tue May  7 10:43:05 CST 2024] <<<<<< finish DM-TABLE_CHECKPOINT_BACKWARD optimistic >>>>>>
[Tue May  7 10:43:05 CST 2024] <<<<<< start DM-RESYNC_NOT_FLUSHED optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
[Tue May  7 10:43:06 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #2
[Tue May  7 10:43:06 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker1
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-master.test exit...
got=2 expected=2
check diff successfully
restart dm-master
dmctl test cmd: "query-status test"
got=1 expected=1
<<<<<< test_source_and_target_with_empty_gtid success! >>>>>>
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process worker1 exit...
wait process dm-worker.test exit...
wait process dm-master.test exit...
wait process dm-master exit...
wait process worker1 exit...
process worker1 already exit
[Tue May  7 10:43:09 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=1 expected=1
got=1 expected=1
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8262 is alive
restart dm-worker2
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "pause-task test"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:43:11 CST 2024] <<<<<< test case all_mode success! >>>>>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_dm_integration_test-1920/tiflow-dm already exists)
[Pipeline] // cache
[Pipeline] }
wait process worker2 exit...
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
wait process dm-master.test exit...
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Tue May  7 10:43:12 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
wait process worker2 exit...
process worker2 already exit
[Tue May  7 10:43:12 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 0-th time, will retry again
rpc addr 127.0.0.1:8263 is alive
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "shard-ddl-lock"
wait process dm-master.test exit...
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 1-th time, will retry again
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-master.test exit...
check diff failed 2-th time, retry later
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
wait process dm-master.test exit...
check diff failed 3-th time, retry later
wait process dm-master.test exit...
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Tue May  7 10:43:23 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #3
[Tue May  7 10:43:23 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 5-th time, will retry again
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
wait process dm-master.test exit...
check log contain failed 1-th time, retry later
wait process dm-master.test exit...
[Tue May  7 10:43:24 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Tue May  7 10:43:25 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 4\/0\/0" count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-master.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
restart dm-master
rpc addr 127.0.0.1:8263 is alive
wait process dm-master.test exit...
wait process dm-master exit...
got=1 expected=1
got=1 expected=1
wait process dm-master.test exit...
wait process dm-master exit...
process dm-master already exit
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "pause-task test"
wait process dm-master.test exit...
[Tue May  7 10:43:30 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 0-th time, will retry again
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 1-th time, will retry again
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
wait process dm-master.test exit...
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff failed 2-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
wait process dm-master.test exit...
check diff failed 3-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
wait process dm-master.test exit...
wait process dm-master.test exit...
check diff failed at last
dmctl test cmd: "binlog skip test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 5-th time, will retry again
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
[Tue May  7 10:43:42 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #4
[Tue May  7 10:43:42 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
check log contain failed 1-th time, retry later
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 5\/0\/0" count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-master.test exit...
wait process dm-master.test exit...
got=1 expected=1
got=1 expected=1
wait process dm-master.test exit...
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
restart dm-master
wait process dm-master exit...
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 0-th time, will retry again
wait process dm-master exit...
process dm-master already exit
wait process dm-master.test exit...
check diff successfully
restart dm-worker1
wait process worker1 exit...
wait process dm-master.test exit...
wait process worker1 exit...
process worker1 already exit
[Tue May  7 10:43:50 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Tue May  7 10:43:50 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 1-th time, will retry again
rpc addr 127.0.0.1:8262 is alive
restart dm-worker2
wait process dm-master.test exit...
wait process worker2 exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-master.test exit...
wait process worker2 exit...
process worker2 already exit
[Tue May  7 10:43:53 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
got=1 expected=1
check diff failed 1-th time, retry later
wait process dm-master.test exit...
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "stop-task test"
[Tue May  7 10:43:54 CST 2024] <<<<<< finish DM-RESYNC_NOT_FLUSHED optimistic >>>>>>
[Tue May  7 10:43:54 CST 2024] <<<<<< start DM-RESYNC_TXN_INTERRUPT optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 3-th time, will retry again
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:43:55 CST 2024] <<<<<< test case ha_cases success! >>>>>>
start running case: [http_proxies] script: [/home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
test dm grpc proxy env setting checking for http_proxy=http://127.0.0.1:8080
[Tue May  7 10:43:55 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
restart dm-worker1
check diff failed 2-th time, retry later
wait process worker1 exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 4-th time, will retry again
wait process worker1 exit...
process worker1 already exit
rpc addr 127.0.0.1:8261 is alive
tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
[Tue May  7 10:43:57 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
check diff failed 3-th time, retry later
[Tue May  7 10:43:58 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 5-th time, will retry again
rpc addr 127.0.0.1:8262 is alive
restart dm-worker2
check diff failed at last
dmctl test cmd: "binlog skip test"
wait process worker2 exit...
got=2 expected=2
got=1 expected=1
dmctl test cmd: "pause-task test"
dmctl test cmd: "resume-task test"
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
[Tue May  7 10:44:00 CST 2024] <<<<<< finish DM-DropAddColumn optimistic >>>>>>
run DM_DropAddColumn case #5
[Tue May  7 10:44:00 CST 2024] <<<<<< start DM-DropAddColumn optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
process dm-master.test already exit
wait process worker2 exit...
process worker2 already exit
[Tue May  7 10:44:01 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 6\/0\/0" count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8263 is alive
begin;
insert into shardddl1.tb2 values (1,1);
insert into shardddl1.tb2 values (2,2);
insert into shardddl1.tb2 values (3,3);
insert into shardddl1.tb2 values (4,4);
insert into shardddl1.tb2 values (5,5);
insert into shardddl1.tb2 values (6,6);
insert into shardddl1.tb2 values (7,7);
insert into shardddl1.tb2 values (8,8);
insert into shardddl1.tb2 values (9,9);
insert into shardddl1.tb2 values (10,10);
commit;
begin;
insert into shardddl1.t_1 values (11,11);
insert into shardddl1.t_1 values (12,12);
insert into shardddl1.t_1 values (13,13);
insert into shardddl1.t_1 values (14,14);
insert into shardddl1.t_1 values (15,15);
insert into shardddl1.t_1 values (16,16);
insert into shardddl1.t_1 values (17,17);
insert into shardddl1.t_1 values (18,18);
insert into shardddl1.t_1 values (19,19);
insert into shardddl1.t_1 values (20,20);
insert into shardddl1.t_1 values (21,21);
insert into shardddl1.t_1 values (22,22);
insert into shardddl1.t_1 values (23,23);
insert into shardddl1.t_1 values (24,24);
insert into shardddl1.t_1 values (25,25);
insert into shardddl1.t_1 values (26,26);
insert into shardddl1.t_1 values (27,27);
insert into shardddl1.t_1 values (28,28);
insert into shardddl1.t_1 values (29,29);
insert into shardddl1.t_1 values (30,30);
insert into shardddl1.t_1 values (31,31);
insert into shardddl1.t_1 values (32,32);
insert into shardddl1.t_1 values (33,33);
insert into shardddl1.t_1 values (34,34);
insert into shardddl1.t_1 values (35,35);
insert into shardddl1.t_1 values (36,36);
insert into shardddl1.t_1 values (37,37);
insert into shardddl1.t_1 values (38,38);
insert into shardddl1.t_1 values (39,39);
insert into shardddl1.t_1 values (40,40);
insert into shardddl1.t_1 values (41,41);
insert into shardddl1.t_1 values (42,42);
insert into shardddl1.t_1 values (43,43);
insert into shardddl1.t_1 values (44,44);
insert into shardddl1.t_1 values (45,45);
insert into shardddl1.t_1 values (46,46);
insert into shardddl1.t_1 values (47,47);
insert into shardddl1.t_1 values (48,48);
insert into shardddl1.t_1 values (49,49);
insert into shardddl1.t_1 values (50,50);
commit;
begin;
insert into shardddl1.tb1 values (51,51);
insert into shardddl1.tb1 values (52,52);
insert into shardddl1.tb1 values (53,53);
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
insert into shardddl1.tb1 values (54,54);
insert into shardddl1.tb1 values (55,55);
insert into shardddl1.tb1 values (56,56);
insert into shardddl1.tb1 values (57,57);
insert into shardddl1.tb1 values (58,58);
insert into shardddl1.tb1 values (59,59);
insert into shardddl1.tb1 values (60,60);
commit;
begin;
insert into shardddl1.t_1 values (61,61);
insert into shardddl1.t_1 values (62,62);
insert into shardddl1.t_1 values (63,63);
insert into shardddl1.t_1 values (64,64);
insert into shardddl1.t_1 values (65,65);
insert into shardddl1.t_1 values (66,66);
insert into shardddl1.t_1 values (67,67);
insert into shardddl1.t_1 values (68,68);
insert into shardddl1.t_1 values (69,69);
insert into shardddl1.t_1 values (70,70);
commit;
check diff failed 1-th time, retry later
check log contain failed 1-th time, retry later
wait process dm-worker.test exit...
got=1 expected=1
got=1 expected=1
--> test duplicate auto-incr pk
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-worker.test exit...
check diff failed 2-th time, retry later
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
dmctl test cmd: "query-status test"
got=3 expected=3
got=2 expected=2
restart dm-master
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
test dm grpc proxy env setting checking for https_proxy=https://127.0.0.1:8080
[Tue May  7 10:44:06 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master exit...
wait process dm-worker.test exit...
check diff failed 3-th time, retry later
wait process dm-master exit...
process dm-master already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
check diff successfully
restart dm-worker1
rpc addr 127.0.0.1:8261 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
[Tue May  7 10:44:08 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Tue May  7 10:44:09 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:44:09 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
wait process worker1 exit...
rpc addr 127.0.0.1:8261 is alive
[Tue May  7 10:44:10 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process worker1 exit...
process worker1 already exit
[Tue May  7 10:44:10 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Tue May  7 10:44:11 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
restart dm-worker2
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 0-th time, will retry again
wait process dm-master.test exit...
rpc addr 127.0.0.1:8263 is alive
wait process worker2 exit...
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source2.yaml"
wait process dm-master.test exit...
process dm-master.test already exit
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 1-th time, will retry again
wait process dm-worker.test exit...
wait process worker2 exit...
process worker2 already exit
[Tue May  7 10:44:13 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/sharding-task.yaml --remove-meta"
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8263 is alive
begin;
insert into shardddl1.tb2 values (101,101);
insert into shardddl1.tb2 values (102,102);
insert into shardddl1.tb2 values (103,103);
insert into shardddl1.tb2 values (104,104);
insert into shardddl1.tb2 values (105,105);
insert into shardddl1.tb2 values (106,106);
insert into shardddl1.tb2 values (107,107);
insert into shardddl1.tb2 values (108,108);
insert into shardddl1.tb2 values (109,109);
insert into shardddl1.tb2 values (110,110);
commit;
begin;
insert into shardddl1.tb1 values (111,111);
insert into shardddl1.tb1 values (112,112);
insert into shardddl1.tb1 values (113,113);
insert into shardddl1.tb1 values (114,114);
insert into shardddl1.tb1 values (115,115);
insert into shardddl1.tb1 values (116,116);
insert into shardddl1.tb1 values (117,117);
insert into shardddl1.tb1 values (118,118);
insert into shardddl1.tb1 values (119,119);
insert into shardddl1.tb1 values (120,120);
commit;
begin;
insert into shardddl1.tb2 values (121,121);
insert into shardddl1.tb2 values (122,122);
insert into shardddl1.tb2 values (123,123);
insert into shardddl1.tb2 values (124,124);
insert into shardddl1.tb2 values (125,125);
insert into shardddl1.tb2 values (126,126);
insert into shardddl1.tb2 values (127,127);
insert into shardddl1.tb2 values (128,128);
insert into shardddl1.tb2 values (129,129);
insert into shardddl1.tb2 values (130,130);
commit;
begin;
insert into shardddl1.t_1 values (131,131);
insert into shardddl1.t_1 values (132,132);
insert into shardddl1.t_1 values (133,133);
insert into shardddl1.t_1 values (134,134);
insert into shardddl1.t_1 values (135,135);
insert into shardddl1.t_1 values (136,136);
insert into shardddl1.t_1 values (137,137);
insert into shardddl1.t_1 values (138,138);
insert into shardddl1.t_1 values (139,139);
insert into shardddl1.t_1 values (140,140);
commit;
check diff successfully
begin;
insert into shardddl1.tb2 values (201,201);
insert into shardddl1.tb2 values (202,202);
insert into shardddl1.tb2 values (203,203);
insert into shardddl1.tb2 values (204,204);
insert into shardddl1.tb2 values (205,205);
insert into shardddl1.tb2 values (206,206);
insert into shardddl1.tb2 values (207,207);
insert into shardddl1.tb2 values (208,208);
insert into shardddl1.tb2 values (209,209);
insert into shardddl1.tb2 values (210,210);
commit;
begin;
insert into shardddl1.tb1 values (211,211);
insert into shardddl1.tb1 values (212,212);
insert into shardddl1.tb1 values (213,213);
insert into shardddl1.tb1 values (214,214);
insert into shardddl1.tb1 values (215,215);
insert into shardddl1.tb1 values (216,216);
insert into shardddl1.tb1 values (217,217);
insert into shardddl1.tb1 values (218,218);
insert into shardddl1.tb1 values (219,219);
insert into shardddl1.tb1 values (220,220);
commit;
begin;
insert into shardddl1.tb2 values (221,221);
insert into shardddl1.tb2 values (222,222);
insert into shardddl1.tb2 values (223,223);
insert into shardddl1.tb2 values (224,224);
insert into shardddl1.tb2 values (225,225);
insert into shardddl1.tb2 values (226,226);
insert into shardddl1.tb2 values (227,227);
insert into shardddl1.tb2 values (228,228);
insert into shardddl1.tb2 values (229,229);
insert into shardddl1.tb2 values (230,230);
commit;
begin;
insert into shardddl1.t_1 values (231,231);
insert into shardddl1.t_1 values (232,232);
insert into shardddl1.t_1 values (233,233);
insert into shardddl1.t_1 values (234,234);
insert into shardddl1.t_1 values (235,235);
insert into shardddl1.t_1 values (236,236);
insert into shardddl1.t_1 values (237,237);
insert into shardddl1.t_1 values (238,238);
insert into shardddl1.t_1 values (239,239);
insert into shardddl1.t_1 values (240,240);
commit;
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 2-th time, will retry again
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
test dm grpc proxy env setting checking for no_proxy=localhost,127.0.0.1
[Tue May  7 10:44:18 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check diff successfully
dmctl test cmd: "stop-task test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 3-th time, will retry again
[Tue May  7 10:44:18 CST 2024] <<<<<< finish DM-RESYNC_TXN_INTERRUPT optimistic >>>>>>
[Tue May  7 10:44:18 CST 2024] <<<<<< start DM-STRICT_OPTIMISTIC_SINGLE_SOURCE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/single-source-strict-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=1 expected=2
command: query-status test "processedRowsStatus": "insert\/update\/delete: 3\/0\/0" count: 1 != expected: 2, failed the 0-th time, will retry again
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 4-th time, will retry again
rpc addr 127.0.0.1:8261 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
[Tue May  7 10:44:20 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/http_proxies/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=1 expected=1
got=1 expected=1
dmctl test cmd: "stop-task test"
[Tue May  7 10:44:21 CST 2024] <<<<<< finish DM-STRICT_OPTIMISTIC_SINGLE_SOURCE optimistic >>>>>>
[Tue May  7 10:44:21 CST 2024] <<<<<< start DM-STRICT_OPTIMISTIC_DOUBLE_SOURCE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-strict-optimistic.yaml --remove-meta"
got=2 expected=2
got=1 expected=2
command: query-status test pendingRowsStatus": "insert\/update\/delete: 0\/0\/0 count: 1 != expected: 2, failed the 1-th time, will retry again
rpc addr 127.0.0.1:8262 is alive
./tests/_utils/check_log_contains: line 15: [: proxy: integer expression expected
./tests/_utils/check_log_contains: line 21: [: proxy: integer expression expected
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 5-th time, will retry again
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
got=1 expected=1
dmctl test cmd: "stop-task test"
got=2 expected=2
got=2 expected=2
got=2 expected=2
[Tue May  7 10:44:23 CST 2024] <<<<<< finish DM-STRICT_OPTIMISTIC_DOUBLE_SOURCE optimistic >>>>>>
[Tue May  7 10:44:23 CST 2024] <<<<<< start DM-131 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
process dm-master.test already exit
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
dmctl test cmd: "query-status test"
wait process dm-worker.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 6-th time, will retry again
got=2 expected=2
check diff successfully
wait process dm-master.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
[Tue May  7 10:44:26 CST 2024] <<<<<< finish DM-131 optimistic >>>>>>
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 7-th time, will retry again
[Tue May  7 10:44:27 CST 2024] <<<<<< start DM-132 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 8-th time, will retry again
wait process dm-worker.test exit...
check diff failed 1-th time, retry later
wait process dm-worker.test exit...
process dm-worker.test already exit
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:44:29 CST 2024] <<<<<< test case http_proxies success! >>>>>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_dm_integration_test-1920/tiflow-dm already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Tue May  7 10:44:29 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Pipeline] // stage
[Pipeline] }
check diff successfully
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8261 is alive
[Tue May  7 10:44:31 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 9-th time, will retry again
[Tue May  7 10:44:31 CST 2024] <<<<<< finish DM-132 pessimistic >>>>>>
[Tue May  7 10:44:31 CST 2024] <<<<<< start DM-132 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Tue May  7 10:44:32 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
{
    "result": true,
    "msg": "",
    "sources": [
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-01",
                "worker": "worker1",
                "result": null,
                "relayStatus": null
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Paused",
                    "unit": "Sync",
                    "result": {
                        "isCanceled": false,
                        "errors": [
                            {
                                "ErrCode": 42501,
                                "ErrClass": "ha",
                                "ErrScope": "internal",
                                "ErrLevel": "high",
                                "Message": "startLocation: [position: (dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 42228), gtid-set: c9c6fcd6-0c19-11ef-96cb-2e73d8da641e:1-194], endLocation: [position: (dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 42353), gtid-set: c9c6fcd6-0c19-11ef-96cb-2e73d8da641e:1-195], origin SQL: [alter table shardddl1.tb1 add column b int after a]: fail to do etcd txn operation: txn commit failed",
                                "RawCause": "rpc error: code = Unavailable desc = error reading from server: EOF",
                                "Workaround": "Please check dm-master's node status and the network between this node and dm-master"
                            }
                        ],
                        "detail": null
                    },
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "12",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 42353)",
                        "masterBinlogGtid": "c9c6fcd6-0c19-11ef-96cb-2e73d8da641e:1-195",
                        "syncerBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 42163)",
                        "syncerBinlogGtid": "c9c6fcd6-0c19-11ef-96cb-2e73d8da641e:1-194",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "remote",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "12",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        },
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-02",
                "worker": "worker2",
                "result": null,
                "relayStatus": {
                    "masterBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 39206)",
                    "masterBinlogGtid": "ca252550-0c19-11ef-a48c-2e73d8da641e:1-167",
                    "relaySubDir": "ca252550-0c19-11ef-a48c-2e73d8da641e.000001",
                    "relayBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 39206)",
                    "relayBinlogGtid": "ca252550-0c19-11ef-a48c-2e73d8da641e:1-167",
                    "relayCatchUpMaster": true,
                    "stage": "Running",
                    "result": null
                }
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Running",
                    "unit": "Sync",
                    "result": null,
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "6",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin.000001, 39206)",
                        "masterBinlogGtid": "ca252550-0c19-11ef-a48c-2e73d8da641e:1-167",
                        "syncerBinlog": "(dm-it-c8d81ac4-0391-4f95-ad8f-ae0375d18f89-9m9cf-3nz74-bin|000001.000001, 38926)",
                        "syncerBinlogGtid": "ca252550-0c19-11ef-a48c-2e73d8da641e:1-166",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "local",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "6",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        }
    ]
}
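Editor's note: the JSON above is dmctl's `query-status` payload, and the interesting facts (the paused subtask, the etcd txn error 42501) sit in well-known fields. When scripting against output like this, asserting on fields is more robust than grepping raw text. A sketch, assuming `jq` is available and the payload is saved to a hypothetical `status.json`:

    # Pull the subtask stage and first error code from the payload above.
    stage=$(jq -r '.sources[0].subTaskStatus[0].stage' status.json)
    errcode=$(jq -r '.sources[0].subTaskStatus[0].result.errors[0].ErrCode' status.json)
    if [ "$stage" = "Paused" ] && [ "$errcode" = "42501" ]; then
        echo "subtask paused by etcd txn failure; resume once dm-master recovers"
    fi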
PASS
coverage: 3.8% of statements in github.com/pingcap/tiflow/dm/...
curl: (7) Failed connect to 127.0.0.1:8361; Connection refused
curl: (7) Failed connect to 127.0.0.1:8461; Connection refused
curl: (7) Failed connect to 127.0.0.1:8561; Connection refused
curl: (7) Failed connect to 127.0.0.1:8661; Connection refused
curl: (7) Failed connect to 127.0.0.1:8761; Connection refused
curl: (7) Failed connect to 127.0.0.1:8264; Connection refused
curl: (7) Failed connect to 127.0.0.1:18262; Connection refused
curl: (7) Failed connect to 127.0.0.1:18263; Connection refused
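Editor's note: this run of `curl: (7)` lines is the post-test port sweep confirming that no dm-master/dm-worker is still listening; it is the inverse of the "wait for rpc addr ... alive the N-th time" probes seen throughout the log. A sketch of such a probe loop in the same spirit (function name and the `/status` path are assumptions, not the repository's actual helper):

    wait_rpc_alive() {
        local addr=$1 retries=${2:-10}
        for i in $(seq 1 "$retries"); do
            # Any HTTP response counts as alive; exit code 7 means connection refused.
            if curl -s -o /dev/null "http://$addr/status"; then
                echo "rpc addr $addr is alive"
                return 0
            fi
            echo "wait for rpc addr $addr alive the $i-th time"
            sleep 1
        done
        return 1
    }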
make: *** [dm_integration_test_in_group] Error 1
[Pipeline] }
Cache not saved (inner-step execution failed)
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-task-standalone-long-interval.yaml --remove-meta"
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
Post stage
[Pipeline] sh
[Tue May  7 10:44:33 CST 2024] <<<<<< finish DM-132 optimistic >>>>>>
+ ls /tmp/dm_test
cov.shardddl1.dmctl.1715049248.677.out
cov.shardddl1.dmctl.1715049249.794.out
cov.shardddl1.dmctl.1715049254.1072.out
cov.shardddl1.dmctl.1715049255.1110.out
cov.shardddl1.dmctl.1715049287.4313.out
cov.shardddl1.dmctl.1715049291.4626.out
cov.shardddl1.dmctl.1715049292.4667.out
cov.shardddl1.dmctl.1715049323.5161.out
cov.shardddl1.dmctl.1715049327.5470.out
cov.shardddl1.dmctl.1715049328.5515.out
cov.shardddl1.dmctl.1715049345.6118.out
cov.shardddl1.dmctl.1715049349.6435.out
cov.shardddl1.dmctl.1715049350.6474.out
cov.shardddl1.dmctl.1715049350.6560.out
cov.shardddl1.dmctl.1715049351.6697.out
cov.shardddl1.dmctl.1715049352.6729.out
cov.shardddl1.dmctl.1715049354.6840.out
cov.shardddl1.dmctl.1715049355.6984.out
cov.shardddl1.dmctl.1715049356.7026.out
cov.shardddl1.dmctl.1715049356.7145.out
cov.shardddl1.dmctl.1715049357.7181.out
cov.shardddl1.dmctl.1715049361.7280.out
cov.shardddl1.dmctl.1715049361.7315.out
cov.shardddl1.dmctl.1715049362.7349.out
cov.shardddl1.dmctl.1715049369.7539.out
cov.shardddl1.dmctl.1715049371.7600.out
cov.shardddl1.dmctl.1715049371.7633.out
cov.shardddl1.dmctl.1715049371.7664.out
cov.shardddl1.dmctl.1715049372.7807.out
cov.shardddl1.dmctl.1715049373.7841.out
cov.shardddl1.dmctl.1715049376.8032.out
cov.shardddl1.dmctl.1715049377.8173.out
cov.shardddl1.dmctl.1715049379.8204.out
cov.shardddl1.dmctl.1715049379.8325.out
cov.shardddl1.dmctl.1715049379.8362.out
cov.shardddl1.dmctl.1715049380.8504.out
cov.shardddl1.dmctl.1715049381.8546.out
cov.shardddl1.dmctl.1715049381.8641.out
cov.shardddl1.dmctl.1715049385.8769.out
cov.shardddl1.dmctl.1715049385.8806.out
cov.shardddl1.dmctl.1715049385.8838.out
cov.shardddl1.dmctl.1715049390.9105.out
cov.shardddl1.dmctl.1715049390.9152.out
cov.shardddl1.dmctl.1715049391.9198.out
cov.shardddl1.dmctl.1715049396.9329.out
cov.shardddl1.dmctl.1715049396.9474.out
cov.shardddl1.dmctl.1715049398.9504.out
cov.shardddl1.dmctl.1715049402.9638.out
cov.shardddl1.dmctl.1715049437.9919.out
cov.shardddl1.dmctl.1715049437.9962.out
cov.shardddl1.dmctl.1715049439.10010.out
cov.shardddl1.dmctl.1715049441.10102.out
cov.shardddl1.dmctl.1715049441.10137.out
cov.shardddl1.dmctl.1715049450.10311.out
cov.shardddl1.dmctl.1715049450.10347.out
cov.shardddl1.dmctl.1715049450.10381.out
cov.shardddl1.dmctl.1715049453.10524.out
cov.shardddl1.dmctl.1715049454.10560.out
cov.shardddl1.dmctl.1715049456.10650.out
cov.shardddl1.dmctl.1715049456.10684.out
cov.shardddl1.dmctl.1715049464.10865.out
cov.shardddl1.dmctl.1715049464.10901.out
cov.shardddl1.dmctl.1715049464.10942.out
cov.shardddl1.dmctl.1715049464.11084.out
cov.shardddl1.dmctl.1715049465.11115.out
cov.shardddl1.dmctl.1715049475.11629.out
cov.shardddl1.dmctl.1715049475.11776.out
cov.shardddl1.dmctl.1715049476.11809.out
cov.shardddl1.dmctl.1715049479.12215.out
cov.shardddl1.dmctl.1715049480.12355.out
cov.shardddl1.dmctl.1715049481.12388.out
cov.shardddl1.dmctl.1715049484.12683.out
cov.shardddl1.dmctl.1715049484.12826.out
cov.shardddl1.dmctl.1715049486.12857.out
cov.shardddl1.dmctl.1715049486.13111.out
cov.shardddl1.dmctl.1715049490.13421.out
cov.shardddl1.dmctl.1715049491.13472.out
cov.shardddl1.dmctl.1715049493.13525.out
cov.shardddl1.dmctl.1715049493.13559.out
cov.shardddl1.dmctl.1715049497.13891.out
cov.shardddl1.dmctl.1715049498.13938.out
cov.shardddl1.dmctl.1715049498.14067.out
cov.shardddl1.dmctl.1715049502.14378.out
cov.shardddl1.dmctl.1715049504.14415.out
cov.shardddl1.dmctl.1715049504.14453.out
cov.shardddl1.dmctl.1715049504.14492.out
cov.shardddl1.dmctl.1715049506.14634.out
cov.shardddl1.dmctl.1715049507.14663.out
cov.shardddl1.dmctl.1715049507.14738.out
cov.shardddl1.dmctl.1715049508.14880.out
cov.shardddl1.dmctl.1715049510.14915.out
cov.shardddl1.dmctl.1715049517.15085.out
cov.shardddl1.dmctl.1715049518.15225.out
cov.shardddl1.dmctl.1715049519.15261.out
cov.shardddl1.dmctl.1715049522.15352.out
cov.shardddl1.dmctl.1715049523.15492.out
cov.shardddl1.dmctl.1715049524.15519.out
cov.shardddl1.dmctl.1715049525.15604.out
cov.shardddl1.master.out
cov.shardddl1.worker.8262.1715049247.out
cov.shardddl1.worker.8262.1715049253.out
cov.shardddl1.worker.8262.1715049290.out
cov.shardddl1.worker.8262.1715049326.out
cov.shardddl1.worker.8262.1715049348.out
cov.shardddl1.worker.8262.1715049489.out
cov.shardddl1.worker.8262.1715049496.out
cov.shardddl1.worker.8262.1715049501.out
cov.shardddl1.worker.8263.1715049248.out
cov.shardddl1.worker.8263.1715049253.out
cov.shardddl1.worker.8263.1715049290.out
cov.shardddl1.worker.8263.1715049326.out
cov.shardddl1.worker.8263.1715049348.out
cov.shardddl1.worker.8263.1715049489.out
cov.shardddl1.worker.8263.1715049496.out
cov.shardddl1.worker.8263.1715049501.out
cov.shardddl1_1.dmctl.1715049533.16038.out
cov.shardddl1_1.dmctl.1715049535.16150.out
cov.shardddl1_1.dmctl.1715049536.16239.out
cov.shardddl1_1.dmctl.1715049537.16298.out
cov.shardddl1_1.dmctl.1715049540.16637.out
cov.shardddl1_1.dmctl.1715049540.16772.out
cov.shardddl1_1.dmctl.1715049542.16812.out
cov.shardddl1_1.dmctl.1715049542.17069.out
cov.shardddl1_1.dmctl.1715049543.17217.out
cov.shardddl1_1.dmctl.1715049544.17251.out
cov.shardddl1_1.dmctl.1715049546.17367.out
cov.shardddl1_1.dmctl.1715049548.17507.out
cov.shardddl1_1.dmctl.1715049549.17550.out
cov.shardddl1_1.dmctl.1715049551.17663.out
cov.shardddl1_1.dmctl.1715049553.17803.out
cov.shardddl1_1.dmctl.1715049554.17840.out
cov.shardddl1_1.dmctl.1715049556.17960.out
cov.shardddl1_1.dmctl.1715049558.18095.out
cov.shardddl1_1.dmctl.1715049559.18131.out
cov.shardddl1_1.dmctl.1715049561.18247.out
cov.shardddl1_1.dmctl.1715049563.18389.out
cov.shardddl1_1.dmctl.1715049564.18420.out
cov.shardddl1_1.dmctl.1715049566.18560.out
cov.shardddl1_1.dmctl.1715049568.18698.out
cov.shardddl1_1.dmctl.1715049569.18735.out
cov.shardddl1_1.dmctl.1715049569.18802.out
cov.shardddl1_1.dmctl.1715049571.18943.out
cov.shardddl1_1.dmctl.1715049572.18984.out
cov.shardddl1_1.dmctl.1715049572.19053.out
cov.shardddl1_1.dmctl.1715049574.19197.out
cov.shardddl1_1.dmctl.1715049576.19228.out
cov.shardddl1_1.dmctl.1715049577.19329.out
cov.shardddl1_1.dmctl.1715049578.19467.out
cov.shardddl1_1.dmctl.1715049579.19503.out
cov.shardddl1_1.dmctl.1715049582.19579.out
cov.shardddl1_1.dmctl.1715049589.19719.out
cov.shardddl1_1.dmctl.1715049590.19746.out
cov.shardddl1_1.dmctl.1715049593.19800.out
cov.shardddl1_1.dmctl.1715049594.19939.out
cov.shardddl1_1.dmctl.1715049595.19969.out
cov.shardddl1_1.dmctl.1715049595.20015.out
cov.shardddl1_1.dmctl.1715049597.20152.out
cov.shardddl1_1.dmctl.1715049598.20183.out
cov.shardddl1_1.dmctl.1715049598.20229.out
cov.shardddl1_1.dmctl.1715049599.20365.out
cov.shardddl1_1.dmctl.1715049601.20396.out
cov.shardddl1_1.dmctl.1715049601.20443.out
cov.shardddl1_1.dmctl.1715049602.20577.out
cov.shardddl1_1.dmctl.1715049603.20610.out
cov.shardddl1_1.dmctl.1715049604.20649.out
cov.shardddl1_1.dmctl.1715049604.20782.out
cov.shardddl1_1.dmctl.1715049605.20812.out
cov.shardddl1_1.dmctl.1715049605.20854.out
cov.shardddl1_1.dmctl.1715049607.20989.out
cov.shardddl1_1.dmctl.1715049608.21021.out
cov.shardddl1_1.dmctl.1715049610.21079.out
cov.shardddl1_1.dmctl.1715049612.21222.out
cov.shardddl1_1.dmctl.1715049613.21253.out
cov.shardddl1_1.dmctl.1715049615.21304.out
cov.shardddl1_1.dmctl.1715049616.21440.out
cov.shardddl1_1.dmctl.1715049618.21469.out
cov.shardddl1_1.dmctl.1715049620.21522.out
cov.shardddl1_1.dmctl.1715049621.21668.out
cov.shardddl1_1.dmctl.1715049623.21700.out
cov.shardddl1_1.dmctl.1715049623.21738.out
cov.shardddl1_1.dmctl.1715049623.21773.out
cov.shardddl1_1.dmctl.1715049624.21913.out
cov.shardddl1_1.dmctl.1715049625.21944.out
cov.shardddl1_1.dmctl.1715049626.22036.out
cov.shardddl1_1.dmctl.1715049627.22179.out
cov.shardddl1_1.dmctl.1715049628.22209.out
cov.shardddl1_1.dmctl.1715049629.22264.out
cov.shardddl1_1.dmctl.1715049629.22300.out
cov.shardddl1_1.dmctl.1715049630.22442.out
cov.shardddl1_1.dmctl.1715049631.22471.out
cov.shardddl1_1.dmctl.1715049631.22507.out
cov.shardddl1_1.dmctl.1715049632.22532.out
cov.shardddl1_1.dmctl.1715049633.22672.out
cov.shardddl1_1.dmctl.1715049634.22703.out
cov.shardddl1_1.dmctl.1715049637.22820.out
cov.shardddl1_1.dmctl.1715049637.22957.out
cov.shardddl1_1.dmctl.1715049638.22986.out
cov.shardddl1_1.dmctl.1715049639.23074.out
cov.shardddl1_1.dmctl.1715049640.23213.out
cov.shardddl1_1.dmctl.1715049641.23246.out
cov.shardddl1_1.dmctl.1715049642.23303.out
cov.shardddl1_1.dmctl.1715049642.23440.out
cov.shardddl1_1.dmctl.1715049643.23477.out
cov.shardddl1_1.dmctl.1715049644.23534.out
cov.shardddl1_1.dmctl.1715049644.23572.out
cov.shardddl1_1.dmctl.1715049646.23718.out
cov.shardddl1_1.dmctl.1715049647.23749.out
cov.shardddl1_1.dmctl.1715049652.23949.out
cov.shardddl1_1.dmctl.1715049653.24096.out
cov.shardddl1_1.dmctl.1715049654.24125.out
cov.shardddl1_1.dmctl.1715049657.24263.out
cov.shardddl1_1.dmctl.1715049658.24405.out
cov.shardddl1_1.dmctl.1715049660.24435.out
cov.shardddl1_1.dmctl.1715049660.24501.out
cov.shardddl1_1.dmctl.1715049660.24535.out
cov.shardddl1_1.dmctl.1715049660.24679.out
cov.shardddl1_1.dmctl.1715049662.24712.out
cov.shardddl1_1.dmctl.1715049662.24778.out
cov.shardddl1_1.dmctl.1715049662.24812.out
cov.shardddl1_1.dmctl.1715049663.24954.out
cov.shardddl1_1.dmctl.1715049665.24986.out
cov.shardddl1_1.dmctl.1715049667.25093.out
cov.shardddl1_1.master.out
cov.shardddl1_1.worker.8262.1715049532.out
cov.shardddl1_1.worker.8263.1715049534.out
cov.shardddl2.dmctl.1715049677.25531.out
cov.shardddl2.dmctl.1715049679.25652.out
cov.shardddl2.dmctl.1715049680.25737.out
cov.shardddl2.dmctl.1715049682.25807.out
cov.shardddl2.dmctl.1715049687.26004.out
cov.shardddl2.dmctl.1715049694.26198.out
cov.shardddl2.dmctl.1715049698.26369.out
cov.shardddl2.dmctl.1715049700.26471.out
cov.shardddl2.dmctl.1715049700.26614.out
cov.shardddl2.dmctl.1715049702.26650.out
cov.shardddl2.dmctl.1715049711.26930.out
cov.shardddl2.dmctl.1715049711.27004.out
cov.shardddl2.dmctl.1715049712.27143.out
cov.shardddl2.dmctl.1715049713.27174.out
cov.shardddl2.dmctl.1715049718.27346.out
cov.shardddl2.dmctl.1715049726.27538.out
cov.shardddl2.dmctl.1715049729.27692.out
cov.shardddl2.dmctl.1715049734.27818.out
cov.shardddl2.dmctl.1715049734.27956.out
cov.shardddl2.dmctl.1715049735.27994.out
cov.shardddl2.dmctl.1715049755.28452.out
cov.shardddl2.dmctl.1715049755.28485.out
cov.shardddl2.dmctl.1715049755.28624.out
cov.shardddl2.dmctl.1715049756.28663.out
cov.shardddl2.dmctl.1715049759.28766.out
cov.shardddl2.dmctl.1715049759.28812.out
cov.shardddl2.dmctl.1715049759.28852.out
cov.shardddl2.dmctl.1715049765.29005.out
cov.shardddl2.dmctl.1715049766.29051.out
cov.shardddl2.dmctl.1715049766.29083.out
cov.shardddl2.dmctl.1715049766.29164.out
cov.shardddl2.dmctl.1715049766.29304.out
cov.shardddl2.dmctl.1715049768.29333.out
cov.shardddl2.dmctl.1715049777.29588.out
cov.shardddl2.dmctl.1715049777.29647.out
cov.shardddl2.dmctl.1715049777.29686.out
cov.shardddl2.dmctl.1715049783.29839.out
cov.shardddl2.dmctl.1715049784.29878.out
cov.shardddl2.dmctl.1715049784.29912.out
cov.shardddl2.dmctl.1715049784.29999.out
cov.shardddl2.dmctl.1715049786.30141.out
cov.shardddl2.dmctl.1715049787.30180.out
cov.shardddl2.dmctl.1715049795.30432.out
cov.shardddl2.dmctl.1715049795.30482.out
cov.shardddl2.dmctl.1715049795.30523.out
cov.shardddl2.dmctl.1715049802.30677.out
cov.shardddl2.dmctl.1715049802.30714.out
cov.shardddl2.dmctl.1715049802.30750.out
cov.shardddl2.dmctl.1715049802.30847.out
cov.shardddl2.dmctl.1715049803.30979.out
cov.shardddl2.dmctl.1715049804.31022.out
cov.shardddl2.dmctl.1715049806.31127.out
cov.shardddl2.dmctl.1715049813.31314.out
cov.shardddl2.dmctl.1715049815.31356.out
cov.shardddl2.dmctl.1715049822.31500.out
cov.shardddl2.dmctl.1715049822.31531.out
cov.shardddl2.dmctl.1715049822.31565.out
cov.shardddl2.dmctl.1715049822.31672.out
cov.shardddl2.dmctl.1715049822.31811.out
cov.shardddl2.dmctl.1715049824.31843.out
cov.shardddl2.dmctl.1715049826.31948.out
cov.shardddl2.dmctl.1715049833.32136.out
cov.shardddl2.dmctl.1715049833.32173.out
cov.shardddl2.dmctl.1715049840.32333.out
cov.shardddl2.dmctl.1715049840.32369.out
cov.shardddl2.dmctl.1715049840.32402.out
cov.shardddl2.dmctl.1715049840.32497.out
cov.shardddl2.dmctl.1715049840.32629.out
cov.shardddl2.dmctl.1715049842.32682.out
cov.shardddl2.dmctl.1715049844.32788.out
cov.shardddl2.dmctl.1715049845.32836.out
cov.shardddl2.dmctl.1715049851.33012.out
cov.shardddl2.master.out
cov.shardddl2.worker.8262.1715049676.out
cov.shardddl2.worker.8262.1715049684.out
cov.shardddl2.worker.8263.1715049678.out
cov.shardddl2.worker.8263.1715049715.out
downstream
goroutines
shardddl1
shardddl1_1
shardddl2
sql_res.shardddl1.txt
sql_res.shardddl1_1.txt
sql_res.shardddl2.txt
tidb.toml
++ find /tmp/dm_test/ -type f -name '*.log'
+ tar -cvzf log-G07.tar.gz /tmp/dm_test/shardddl2/worker1/log/stdout.log /tmp/dm_test/shardddl2/worker1/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715049711.log /tmp/dm_test/shardddl2/dmctl.1715049806.log /tmp/dm_test/shardddl2/dmctl.1715049734.log /tmp/dm_test/shardddl2/dmctl.1715049735.log /tmp/dm_test/shardddl2/dmctl.1715049784.log /tmp/dm_test/shardddl2/dmctl.1715049682.log /tmp/dm_test/shardddl2/worker2/log/stdout.log /tmp/dm_test/shardddl2/worker2/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1715049712.log /tmp/dm_test/shardddl2/dmctl.1715049803.log /tmp/dm_test/shardddl2/dmctl.1715049815.log /tmp/dm_test/shardddl2/dmctl.1715049783.log /tmp/dm_test/shardddl2/dmctl.1715049786.log /tmp/dm_test/shardddl2/dmctl.1715049777.log /tmp/dm_test/shardddl2/dmctl.1715049833.log /tmp/dm_test/shardddl2/dmctl.1715049755.log /tmp/dm_test/shardddl2/dmctl.1715049679.log /tmp/dm_test/shardddl2/dmctl.1715049713.log /tmp/dm_test/shardddl2/dmctl.1715049826.log /tmp/dm_test/shardddl2/dmctl.1715049700.log /tmp/dm_test/shardddl2/dmctl.1715049804.log /tmp/dm_test/shardddl2/dmctl.1715049840.log /tmp/dm_test/shardddl2/dmctl.1715049694.log /tmp/dm_test/shardddl2/dmctl.1715049813.log /tmp/dm_test/shardddl2/dmctl.1715049802.log /tmp/dm_test/shardddl2/dmctl.1715049677.log /tmp/dm_test/shardddl2/dmctl.1715049851.log /tmp/dm_test/shardddl2/dmctl.1715049687.log /tmp/dm_test/shardddl2/dmctl.1715049718.log /tmp/dm_test/shardddl2/dmctl.1715049795.log /tmp/dm_test/shardddl2/dmctl.1715049726.log /tmp/dm_test/shardddl2/dmctl.1715049766.log /tmp/dm_test/shardddl2/dmctl.1715049844.log /tmp/dm_test/shardddl2/dmctl.1715049845.log /tmp/dm_test/shardddl2/dmctl.1715049765.log /tmp/dm_test/shardddl2/dmctl.1715049824.log /tmp/dm_test/shardddl2/master/log/stdout.log /tmp/dm_test/shardddl2/master/log/dm-master.log /tmp/dm_test/shardddl2/dmctl.1715049702.log /tmp/dm_test/shardddl2/sync_diff_stdout.log /tmp/dm_test/shardddl2/dmctl.1715049698.log /tmp/dm_test/shardddl2/dmctl.1715049822.log /tmp/dm_test/shardddl2/dmctl.1715049759.log /tmp/dm_test/shardddl2/dmctl.1715049768.log /tmp/dm_test/shardddl2/dmctl.1715049680.log /tmp/dm_test/shardddl2/dmctl.1715049787.log /tmp/dm_test/shardddl2/dmctl.1715049842.log /tmp/dm_test/shardddl2/dmctl.1715049756.log /tmp/dm_test/shardddl2/dmctl.1715049729.log /tmp/dm_test/goroutines/stack/log/master-8361.log /tmp/dm_test/goroutines/stack/log/master-8261.log /tmp/dm_test/goroutines/stack/log/worker-8262.log /tmp/dm_test/goroutines/stack/log/worker-8263.log /tmp/dm_test/goroutines/stack/log/master-8761.log /tmp/dm_test/goroutines/stack/log/master-8661.log /tmp/dm_test/goroutines/stack/log/worker-18263.log /tmp/dm_test/goroutines/stack/log/worker-8264.log /tmp/dm_test/goroutines/stack/log/master-8561.log /tmp/dm_test/goroutines/stack/log/worker-18262.log /tmp/dm_test/goroutines/stack/log/master-8461.log /tmp/dm_test/shardddl1/worker1/log/stdout.log /tmp/dm_test/shardddl1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715049323.log /tmp/dm_test/shardddl1/dmctl.1715049362.log /tmp/dm_test/shardddl1/dmctl.1715049475.log /tmp/dm_test/shardddl1/dmctl.1715049480.log /tmp/dm_test/shardddl1/dmctl.1715049390.log /tmp/dm_test/shardddl1/dmctl.1715049396.log /tmp/dm_test/shardddl1/dmctl.1715049453.log /tmp/dm_test/shardddl1/dmctl.1715049497.log /tmp/dm_test/shardddl1/dmctl.1715049524.log /tmp/dm_test/shardddl1/dmctl.1715049507.log /tmp/dm_test/shardddl1/dmctl.1715049439.log /tmp/dm_test/shardddl1/dmctl.1715049328.log /tmp/dm_test/shardddl1/worker2/log/stdout.log 
/tmp/dm_test/shardddl1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1715049354.log /tmp/dm_test/shardddl1/dmctl.1715049519.log /tmp/dm_test/shardddl1/dmctl.1715049385.log /tmp/dm_test/shardddl1/dmctl.1715049248.log /tmp/dm_test/shardddl1/dmctl.1715049356.log /tmp/dm_test/shardddl1/dmctl.1715049479.log /tmp/dm_test/shardddl1/dmctl.1715049441.log /tmp/dm_test/shardddl1/dmctl.1715049351.log /tmp/dm_test/shardddl1/dmctl.1715049371.log /tmp/dm_test/shardddl1/dmctl.1715049504.log /tmp/dm_test/shardddl1/dmctl.1715049369.log /tmp/dm_test/shardddl1/dmctl.1715049373.log /tmp/dm_test/shardddl1/dmctl.1715049350.log /tmp/dm_test/shardddl1/dmctl.1715049491.log /tmp/dm_test/shardddl1/dmctl.1715049254.log /tmp/dm_test/shardddl1/dmctl.1715049255.log /tmp/dm_test/shardddl1/dmctl.1715049380.log /tmp/dm_test/shardddl1/dmctl.1715049345.log /tmp/dm_test/shardddl1/dmctl.1715049465.log /tmp/dm_test/shardddl1/dmctl.1715049379.log /tmp/dm_test/shardddl1/dmctl.1715049486.log /tmp/dm_test/shardddl1/dmctl.1715049287.log /tmp/dm_test/shardddl1/dmctl.1715049518.log /tmp/dm_test/shardddl1/dmctl.1715049361.log /tmp/dm_test/shardddl1/dmctl.1715049502.log /tmp/dm_test/shardddl1/dmctl.1715049517.log /tmp/dm_test/shardddl1/dmctl.1715049523.log /tmp/dm_test/shardddl1/dmctl.1715049391.log /tmp/dm_test/shardddl1/dmctl.1715049292.log /tmp/dm_test/shardddl1/dmctl.1715049355.log /tmp/dm_test/shardddl1/dmctl.1715049493.log /tmp/dm_test/shardddl1/dmctl.1715049372.log /tmp/dm_test/shardddl1/dmctl.1715049508.log /tmp/dm_test/shardddl1/dmctl.1715049398.log /tmp/dm_test/shardddl1/dmctl.1715049522.log /tmp/dm_test/shardddl1/dmctl.1715049464.log /tmp/dm_test/shardddl1/dmctl.1715049481.log /tmp/dm_test/shardddl1/dmctl.1715049450.log /tmp/dm_test/shardddl1/dmctl.1715049454.log /tmp/dm_test/shardddl1/dmctl.1715049402.log /tmp/dm_test/shardddl1/dmctl.1715049376.log /tmp/dm_test/shardddl1/dmctl.1715049357.log /tmp/dm_test/shardddl1/master/log/stdout.log /tmp/dm_test/shardddl1/master/log/dm-master.log /tmp/dm_test/shardddl1/dmctl.1715049249.log /tmp/dm_test/shardddl1/dmctl.1715049498.log /tmp/dm_test/shardddl1/dmctl.1715049456.log /tmp/dm_test/shardddl1/dmctl.1715049490.log /tmp/dm_test/shardddl1/sync_diff_stdout.log /tmp/dm_test/shardddl1/dmctl.1715049437.log /tmp/dm_test/shardddl1/dmctl.1715049377.log /tmp/dm_test/shardddl1/dmctl.1715049525.log /tmp/dm_test/shardddl1/dmctl.1715049291.log /tmp/dm_test/shardddl1/dmctl.1715049484.log /tmp/dm_test/shardddl1/dmctl.1715049510.log /tmp/dm_test/shardddl1/dmctl.1715049381.log /tmp/dm_test/shardddl1/dmctl.1715049506.log /tmp/dm_test/shardddl1/dmctl.1715049352.log /tmp/dm_test/shardddl1/dmctl.1715049327.log /tmp/dm_test/shardddl1/dmctl.1715049476.log /tmp/dm_test/shardddl1/dmctl.1715049349.log /tmp/dm_test/shardddl1_1/dmctl.1715049542.log /tmp/dm_test/shardddl1_1/worker1/log/stdout.log /tmp/dm_test/shardddl1_1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715049662.log /tmp/dm_test/shardddl1_1/dmctl.1715049646.log /tmp/dm_test/shardddl1_1/dmctl.1715049558.log /tmp/dm_test/shardddl1_1/dmctl.1715049533.log /tmp/dm_test/shardddl1_1/dmctl.1715049631.log /tmp/dm_test/shardddl1_1/dmctl.1715049641.log /tmp/dm_test/shardddl1_1/dmctl.1715049608.log /tmp/dm_test/shardddl1_1/dmctl.1715049566.log /tmp/dm_test/shardddl1_1/dmctl.1715049652.log /tmp/dm_test/shardddl1_1/worker2/log/stdout.log /tmp/dm_test/shardddl1_1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1715049665.log /tmp/dm_test/shardddl1_1/dmctl.1715049638.log /tmp/dm_test/shardddl1_1/dmctl.1715049599.log 
/tmp/dm_test/shardddl1_1/dmctl.1715049571.log /tmp/dm_test/shardddl1_1/dmctl.1715049625.log /tmp/dm_test/shardddl1_1/dmctl.1715049597.log /tmp/dm_test/shardddl1_1/dmctl.1715049632.log /tmp/dm_test/shardddl1_1/dmctl.1715049544.log /tmp/dm_test/shardddl1_1/dmctl.1715049621.log /tmp/dm_test/shardddl1_1/dmctl.1715049626.log /tmp/dm_test/shardddl1_1/dmctl.1715049590.log /tmp/dm_test/shardddl1_1/dmctl.1715049613.log /tmp/dm_test/shardddl1_1/dmctl.1715049594.log /tmp/dm_test/shardddl1_1/dmctl.1715049640.log /tmp/dm_test/shardddl1_1/dmctl.1715049549.log /tmp/dm_test/shardddl1_1/dmctl.1715049598.log /tmp/dm_test/shardddl1_1/dmctl.1715049577.log /tmp/dm_test/shardddl1_1/dmctl.1715049667.log /tmp/dm_test/shardddl1_1/dmctl.1715049624.log /tmp/dm_test/shardddl1_1/dmctl.1715049627.log /tmp/dm_test/shardddl1_1/dmctl.1715049537.log /tmp/dm_test/shardddl1_1/dmctl.1715049618.log /tmp/dm_test/shardddl1_1/dmctl.1715049647.log /tmp/dm_test/shardddl1_1/dmctl.1715049536.log /tmp/dm_test/shardddl1_1/dmctl.1715049548.log /tmp/dm_test/shardddl1_1/dmctl.1715049605.log /tmp/dm_test/shardddl1_1/dmctl.1715049615.log /tmp/dm_test/shardddl1_1/dmctl.1715049579.log /tmp/dm_test/shardddl1_1/dmctl.1715049582.log /tmp/dm_test/shardddl1_1/dmctl.1715049574.log /tmp/dm_test/shardddl1_1/dmctl.1715049543.log /tmp/dm_test/shardddl1_1/dmctl.1715049546.log /tmp/dm_test/shardddl1_1/dmctl.1715049660.log /tmp/dm_test/shardddl1_1/dmctl.1715049630.log /tmp/dm_test/shardddl1_1/dmctl.1715049654.log /tmp/dm_test/shardddl1_1/dmctl.1715049610.log /tmp/dm_test/shardddl1_1/dmctl.1715049633.log /tmp/dm_test/shardddl1_1/dmctl.1715049602.log /tmp/dm_test/shardddl1_1/dmctl.1715049554.log /tmp/dm_test/shardddl1_1/dmctl.1715049616.log /tmp/dm_test/shardddl1_1/dmctl.1715049563.log /tmp/dm_test/shardddl1_1/dmctl.1715049564.log /tmp/dm_test/shardddl1_1/dmctl.1715049629.log /tmp/dm_test/shardddl1_1/dmctl.1715049644.log /tmp/dm_test/shardddl1_1/dmctl.1715049601.log /tmp/dm_test/shardddl1_1/dmctl.1715049595.log /tmp/dm_test/shardddl1_1/dmctl.1715049576.log /tmp/dm_test/shardddl1_1/dmctl.1715049559.log /tmp/dm_test/shardddl1_1/dmctl.1715049642.log /tmp/dm_test/shardddl1_1/dmctl.1715049556.log /tmp/dm_test/shardddl1_1/dmctl.1715049604.log /tmp/dm_test/shardddl1_1/dmctl.1715049551.log /tmp/dm_test/shardddl1_1/dmctl.1715049639.log /tmp/dm_test/shardddl1_1/dmctl.1715049620.log /tmp/dm_test/shardddl1_1/dmctl.1715049561.log /tmp/dm_test/shardddl1_1/dmctl.1715049568.log /tmp/dm_test/shardddl1_1/master/log/stdout.log /tmp/dm_test/shardddl1_1/master/log/dm-master.log /tmp/dm_test/shardddl1_1/dmctl.1715049569.log /tmp/dm_test/shardddl1_1/dmctl.1715049535.log /tmp/dm_test/shardddl1_1/dmctl.1715049603.log /tmp/dm_test/shardddl1_1/sync_diff_stdout.log /tmp/dm_test/shardddl1_1/dmctl.1715049623.log /tmp/dm_test/shardddl1_1/dmctl.1715049663.log /tmp/dm_test/shardddl1_1/dmctl.1715049612.log /tmp/dm_test/shardddl1_1/dmctl.1715049607.log /tmp/dm_test/shardddl1_1/dmctl.1715049572.log /tmp/dm_test/shardddl1_1/dmctl.1715049540.log /tmp/dm_test/shardddl1_1/dmctl.1715049657.log /tmp/dm_test/shardddl1_1/dmctl.1715049637.log /tmp/dm_test/shardddl1_1/dmctl.1715049589.log /tmp/dm_test/shardddl1_1/dmctl.1715049643.log /tmp/dm_test/shardddl1_1/dmctl.1715049653.log /tmp/dm_test/shardddl1_1/dmctl.1715049628.log /tmp/dm_test/shardddl1_1/dmctl.1715049658.log /tmp/dm_test/shardddl1_1/dmctl.1715049634.log /tmp/dm_test/shardddl1_1/dmctl.1715049553.log /tmp/dm_test/shardddl1_1/dmctl.1715049593.log /tmp/dm_test/shardddl1_1/dmctl.1715049578.log /tmp/dm_test/downstream/tidb/log/tidb.log
tar: Removing leading `/' from member names
+ ls -alh log-G07.tar.gz
-rw-r--r-- 1 jenkins jenkins 660K May  7 10:44 log-G07.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
dmctl test cmd: "config source mysql-replica-01"
[Tue May  7 10:44:34 CST 2024] <<<<<< start DM-133 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
got=1 expected=1
got=1 expected=1
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test "processedRowsStatus": "insert\/update\/delete: 2\/0\/1" count: 0 != expected: 1, failed the 0-th time, will retry again
kill finished with exit code 0
dmctl test cmd: "query-status test"
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
restore config
make: *** [dm_integration_test_in_group] Terminated
/home/jenkins/agent/workspace/pingcap/tiflow/pull_dm_integration_test/tiflow/dm/tests/validator_basic/run.sh: line 53: 23621 Terminated              run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" "query-status test" "\"processedRowsStatus\": \"insert\/update\/delete: 2\/0\/1\"" 1 "pendingRowsStatus\": \"insert\/update\/delete: 2\/0\/1" 1 "new\/ignored\/resolved: 0\/0\/0" 1 "\"cutoverBinlogGtid\": \"\"" 1
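Editor's note: the line above shows the harness's `run_dm_ctl_with_retry` being terminated mid-wait; it polls `query-status` until each given pattern appears the expected number of times. A rough, simplified sketch of that polling shape (the real helper lives in the repository's test utilities and accepts multiple pattern/count pairs):

    retry_query_status() {
        local master=$1 pattern=$2 expected=$3
        for i in $(seq 0 9); do
            got=$(dmctl --master-addr "$master" query-status test | grep -c "$pattern")
            [ "$got" -eq "$expected" ] && return 0
            echo "command: query-status test $pattern count: $got != expected: $expected, failed the $i-th time, will retry again"
            sleep 2
        done
        return 1
    }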
restore time_zone
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
script returned exit code 143
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
[Pipeline] // cache
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // node
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 2
Finished: FAILURE