
Console Output

Skipping 679 KB of earlier console output..
3163733504 bytes in 13.40 secs (236069100 bytes/sec)
[Pipeline] {
wait process dm-worker.test exit...
process dm-worker.test already exit
[Mon Apr 29 14:31:24 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-master exit...
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8262 is alive
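The "wait for rpc addr ... alive" / "rpc addr ... is alive" pairs throughout this log come from the harness's port-readiness helper. A minimal sketch of such a loop, assuming a bare TCP probe is enough (the real harness uses its check_* tools, not shown here):

  wait_for_rpc_alive() {
      local addr=$1 i
      for i in $(seq 1 10); do
          echo "wait for rpc addr $addr alive the $i-th time"
          # assumption: a plain TCP connect detects the listener
          if (echo >"/dev/tcp/${addr%:*}/${addr#*:}") 2>/dev/null; then
              echo "rpc addr $addr is alive"
              return 0
          fi
          sleep 1
      done
      return 1
  }
  wait_for_rpc_alive 127.0.0.1:8262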
[Mon Apr 29 14:31:24 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/adjust_gtid/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] timeout
Timeout set to expire in 10 min
[Pipeline] {
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
wait process dm-master exit...
process dm-master already exit
dmctl test cmd: "query-status test"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
got=2 expected=2
check diff failed 1-th time, retry later
rpc addr 127.0.0.1:8263 is alive
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-master.test exit...
[Mon Apr 29 14:31:27 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-master.toml >>>>>>
wait process dm-worker.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Mon Apr 29 14:31:27 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl2/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-master.test exit...
process dm-master.test already exit
check diff successfully
dmctl test cmd: "stop-task test"
[Mon Apr 29 14:31:28 CST 2024] <<<<<< finish DM-137 optimistic >>>>>>
wait process dm-worker.test exit...
[Pipeline] sh
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:31:28 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Pipeline] // withEnv
[Pipeline] }
+ pwd
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow
+ ls -alh
total 380K
drwxr-sr-x 19 jenkins jenkins 4.0K Apr 29 14:31 .
drwxr-sr-x 19 jenkins jenkins 4.0K Apr 29 14:31 ..
-rw-r--r--  1 jenkins jenkins 2.0K Apr 29 14:07 .codecov.yml
lrwxrwxrwx  1 jenkins jenkins   10 Apr 29 14:31 .dockerignore -> .gitignore
-rw-r--r--  1 jenkins jenkins  162 Apr 29 14:07 .editorconfig
drwxr-sr-x  7 jenkins jenkins 4.0K Apr 29 14:31 .git
drwxr-sr-x  4 jenkins jenkins 4.0K Apr 29 14:31 .github
-rw-r--r--  1 jenkins jenkins  985 Apr 29 14:07 .gitignore
-rw-r--r--  1 jenkins jenkins 2.8K Apr 29 14:07 .golangci.yml
-rw-r--r--  1 jenkins jenkins  156 Apr 29 14:07 CODE_OF_CONDUCT.md
-rw-r--r--  1 jenkins jenkins 3.5K Apr 29 14:07 CONTRIBUTING.md
-rw-r--r--  1 jenkins jenkins  12K Apr 29 14:07 LICENSE
-rw-r--r--  1 jenkins jenkins  25K Apr 29 14:07 Makefile
-rw-r--r--  1 jenkins jenkins 2.7K Apr 29 14:07 Makefile.engine
-rw-r--r--  1 jenkins jenkins  712 Apr 29 14:07 OWNERS
-rw-r--r--  1 jenkins jenkins 1.5K Apr 29 14:07 README.md
-rw-r--r--  1 jenkins jenkins 2.1K Apr 29 14:07 README_DM.md
-rw-r--r--  1 jenkins jenkins 5.2K Apr 29 14:07 README_Engine.md
-rw-r--r--  1 jenkins jenkins 2.8K Apr 29 14:07 README_TiCDC.md
-rw-r--r--  1 jenkins jenkins 1.5K Apr 29 14:07 SECURITY.md
drwxr-sr-x  3 jenkins jenkins 4.0K Apr 29 14:31 bin
drwxr-sr-x 17 jenkins jenkins 4.0K Apr 29 14:31 cdc
drwxr-sr-x  3 jenkins jenkins 4.0K Apr 29 14:31 cdcv2
drwxr-sr-x 12 jenkins jenkins 4.0K Apr 29 14:31 cmd
drwxr-sr-x  4 jenkins jenkins 4.0K Apr 29 14:31 deployments
drwxr-sr-x 26 jenkins jenkins 4.0K Apr 29 14:31 dm
drwxr-sr-x  7 jenkins jenkins 4.0K Apr 29 14:31 docs
drwxr-sr-x 13 jenkins jenkins 4.0K Apr 29 14:31 engine
-rwxr-xr-x  1 jenkins jenkins  26K Apr 29 14:07 errors.toml
drwxr-sr-x  4 jenkins jenkins 4.0K Apr 29 14:31 examples
-rw-r--r--  1 jenkins jenkins  21K Apr 29 14:07 go.mod
-rw-r--r--  1 jenkins jenkins 152K Apr 29 14:07 go.sum
drwxr-sr-x  4 jenkins jenkins 4.0K Apr 29 14:31 metrics
drwxr-sr-x 45 jenkins jenkins 4.0K Apr 29 14:31 pkg
drwxr-sr-x  5 jenkins jenkins 4.0K Apr 29 14:31 proto
drwxr-sr-x  3 jenkins jenkins 4.0K Apr 29 14:31 scripts
drwxr-sr-x  5 jenkins jenkins 4.0K Apr 29 14:31 tests
-rw-r--r--  1 jenkins jenkins  582 Apr 29 14:07 third-party-license.txt
drwxr-sr-x  4 jenkins jenkins 4.0K Apr 29 14:31 tools
+ set +e
+ for i in '{1..90}'
+ mysqladmin ping -h127.0.0.1 -P 3306 -p123456 -uroot --silent
mysqld is alive
+ '[' 0 -eq 0 ']'
+ set -e
+ break
+ set +e
+ for i in '{1..90}'
+ mysqladmin ping -h127.0.0.1 -P 3307 -p123456 -uroot --silent
mysqld is alive
+ '[' 0 -eq 0 ']'
+ set -e
+ break
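The xtrace above is a straightforward readiness retry: ping each upstream MySQL until it answers, up to 90 attempts per port. Condensed into a standalone sketch (ports and credentials taken from the trace; the sleep interval is an assumption):

  for port in 3306 3307; do
      for i in {1..90}; do
          if mysqladmin ping -h127.0.0.1 -P "$port" -uroot -p123456 --silent; then
              break          # mysqld is alive, stop retrying this port
          fi
          sleep 1            # assumption: the harness pauses between attempts
      done
  done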
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon Apr 29 14:31:29 CST 2024] <<<<<< test case dmctl_basic success! >>>>>>
start running case: [dmctl_command] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
wait process dm-worker.test exit...
[Mon Apr 29 14:31:29 CST 2024] <<<<<< start DM-138 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
[Mon Apr 29 14:31:29 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Pipeline] // stage
dmctl test cmd: "binlog skip test"
dmctl test cmd: "query-status test"
got=3 expected=3
run tidb sql failed 1-th time, retry later
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/start_task/source1.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-task.yaml --remove-meta"
[Pipeline] }
[Pipeline] }
wait process dm-worker.test exit...
dmctl test cmd: "stop-task test"
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:8261 is alive
dmctl test cmd: "query-status test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 0-th time, will retry again
[Mon Apr 29 14:31:30 CST 2024] <<<<<< finish DM-4215 pessimistic >>>>>>
[Mon Apr 29 14:31:30 CST 2024] <<<<<< start DM-4215 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-optimistic.yaml --remove-meta"
got=2 expected=2
[Pipeline] // timeout
rpc addr 127.0.0.1:8261 is alive
exit code should not be zero
exit code should not be zero
[Mon Apr 29 14:31:30 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
[Pipeline] sh
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon Apr 29 14:31:31 CST 2024] <<<<<< test case adjust_gtid success! >>>>>>
start running case: [async_checkpoint_flush] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon Apr 29 14:31:31 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
check diff failed 1-th time, retry later
check diff failed 1-th time, retry later
+ '[' TLS_GROUP == TLS_GROUP ']'
+ echo 'run tls test'
run tls test
+ echo 'copy mysql certs'
copy mysql certs
+ sudo mkdir -p /var/lib/mysql
+ sudo chmod 777 /var/lib/mysql
+ sudo chown -R 1000:1000 /var/lib/mysql
+ sudo cp -r /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/ca-key.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/ca.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/client-cert.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/client-key.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/private_key.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/public_key.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/server-cert.pem /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/mysql-ssl/server-key.pem /var/lib/mysql/
+ sudo chown -R 1000:1000 /var/lib/mysql/ca-key.pem /var/lib/mysql/ca.pem /var/lib/mysql/client-cert.pem /var/lib/mysql/client-key.pem /var/lib/mysql/private_key.pem /var/lib/mysql/public_key.pem /var/lib/mysql/server-cert.pem /var/lib/mysql/server-key.pem
+ ls -alh /var/lib/mysql/
total 40K
drwxrwxrwx 2 jenkins jenkins 4.0K Apr 29 14:31 .
drwxr-xr-x 1 root    root    4.0K Apr 29 14:31 ..
-rw------- 1 jenkins jenkins 1.7K Apr 29 14:31 ca-key.pem
-rw-r--r-- 1 jenkins jenkins 1.1K Apr 29 14:31 ca.pem
-rw-r--r-- 1 jenkins jenkins 1.1K Apr 29 14:31 client-cert.pem
-rw------- 1 jenkins jenkins 1.7K Apr 29 14:31 client-key.pem
-rw------- 1 jenkins jenkins 1.7K Apr 29 14:31 private_key.pem
-rw-r--r-- 1 jenkins jenkins  451 Apr 29 14:31 public_key.pem
-rw-r--r-- 1 jenkins jenkins 1.1K Apr 29 14:31 server-cert.pem
-rw------- 1 jenkins jenkins 1.7K Apr 29 14:31 server-key.pem
+ export PATH=/usr/local/go/bin:/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ PATH=/usr/local/go/bin:/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ mkdir -p ./dm/tests/bin
+ cp -r ./bin/dm-test-tools/check_exit_safe_binlog ./bin/dm-test-tools/check_master_http_apis ./bin/dm-test-tools/check_master_online ./bin/dm-test-tools/check_master_online_http ./bin/dm-test-tools/check_worker_online ./dm/tests/bin/
+ make dm_integration_test_in_group GROUP=TLS_GROUP
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/tidb-server
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/sync_diff_inspector
/usr/bin/mysql
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/minio
install python requirements for test
pip install --user -q -r ./dm/tests/requirements.txt
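The TLS-group setup traced above amounts to a short script: stage the MySQL SSL certificates, fix their ownership, put Go on the PATH, copy the check tools, and run the grouped make target. A condensed sketch, with $WORKSPACE standing in for the Jenkins job directory seen in the trace:

  sudo mkdir -p /var/lib/mysql && sudo chmod 777 /var/lib/mysql
  sudo cp -r "$WORKSPACE"/mysql-ssl/*.pem /var/lib/mysql/
  sudo chown -R 1000:1000 /var/lib/mysql   # 1000:1000 is the jenkins user in this image
  export PATH=/usr/local/go/bin:/go/bin:$PATH
  mkdir -p ./dm/tests/bin
  cp -r ./bin/dm-test-tools/check_* ./dm/tests/bin/
  make dm_integration_test_in_group GROUP=TLS_GROUP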
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8262 is alive
[Mon Apr 29 14:31:32 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
dmctl test cmd: "query-status test"
got=2 expected=2
got=2 expected=2
dmctl test cmd: "binlog skip test"
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:31:32 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 1-th time, will retry again
dmctl test cmd: "query-status test"
got=2 expected=2
got=2 expected=2
dmctl test cmd: "binlog skip test"
dmctl test cmd: "query-status test"
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/dm-master.test
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/dm-worker.test
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/bin/dm-syncer.test
cd dm && ln -sf ../bin .
cd dm && ./tests/run_group.sh TLS_GROUP
Run cases: tls
...
got=3 expected=3
dmctl test cmd: "stop-task test"
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/dmctl_command/wrong-source.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/dmctl_command/source1.yaml -w worker1"
check diff successfully
dmctl test cmd: "stop-task test"
rm: cannot remove '/tmp/dm_test/tidb.toml': No such file or directory
Starting TiDB on port 4000
Verifying TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Mon Apr 29 14:31:33 CST 2024] <<<<<< finish DM-4215 optimistic >>>>>>
dmctl test cmd: "query-status test"
check diff successfully
1 dm-master alive
1 dm-worker alive
dmctl test cmd: "list-member --name worker1"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "operate-source create /tmp/dm_test/dmctl_command/source2.yaml -w wrong-worker"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/async_checkpoint_flush/source1.yaml"
0 dm-syncer alive
dmctl test cmd: "operate-source create /tmp/dm_test/dmctl_command/source2.yaml -w worker1"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/conf/dm-task.yaml "
[Mon Apr 29 14:31:33 CST 2024] <<<<<< finish DM-138 pessimistic >>>>>>
[Mon Apr 29 14:31:33 CST 2024] <<<<<< start DM-138 optimistic >>>>>>
dmctl test cmd: "operate-source create /tmp/dm_test/dmctl_command/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
[Mon Apr 29 14:31:34 CST 2024] <<<<<< start DM-4216 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-pessimistic.yaml --remove-meta"
check diff successfully
wait process dm-master.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 2-th time, will retry again
check diff successfully
PID of insert_data is 2093
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
version()
5.7.26-log
version()
8.0.21
dmctl test cmd: "start-task /tmp/dm_test/dmctl_command/wrong-dm-task.yaml"
dmctl test cmd: "start-task /tmp/dm_test/dmctl_command/wrong-dm-task.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/dmctl_command/conf/dm-task.yaml"
dmctl test cmd: "query-status test"
got=2 expected=2
start running case: [tls] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tls/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tls/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
check diff successfully
dmctl test cmd: "stop-task test"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-task.yaml "
wait process dm-master.test exit...
process dm-master.test already exit
dmctl test cmd: "query-status test"
check diff successfully
dmctl test cmd: "stop-task test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
[Mon Apr 29 14:31:36 CST 2024] <<<<<< finish DM-138 optimistic >>>>>>
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:5043 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
wait process tidb-server exit...
process tidb-server already exit
mysql_ssl_setup at=/var/lib/mysql/
got=1 expected=1
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:5548 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
add dm_tls_test user done /var/lib/mysql/
run a new tidb server with tls
check diff successfully
got=3 expected=3
dmctl test cmd: "stop-task test"
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "validation status test"
got=1 expected=1
got=0 expected=1
command: validation status test "processedRowsStatus": "insert\/update\/delete: 0\/0\/1" count: 0 != expected: 1, failed the 0-th time, will retry again
[Mon Apr 29 14:31:37 CST 2024] <<<<<< finish DM-4216 pessimistic >>>>>>
[Mon Apr 29 14:31:37 CST 2024] <<<<<< start DM-4216 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-optimistic.yaml --remove-meta"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 3-th time, will retry again
[Mon Apr 29 14:31:37 CST 2024] <<<<<< start DM-139 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-worker.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
failpoint=github.com/pingcap/tiflow/dm/pkg/conn/GetSessionVariableFailed=return("sql_mode,1152")
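The failpoint= line records the failure injection this case enables. With the pingcap/failpoint library, such an injection is switched on through the GO_FAILPOINTS environment variable before the instrumented binary starts; a minimal sketch (the worker invocation below is an assumption, not taken from this log):

  export GO_FAILPOINTS='github.com/pingcap/tiflow/dm/pkg/conn/GetSessionVariableFailed=return("sql_mode,1152")'
  # assumed invocation; the harness's real flags may differ
  bin/dm-worker.test --config conf/dm-worker1.toml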
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:6850 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
got=2 expected=2
got=1 expected=1
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:7521 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
[Mon Apr 29 14:31:39 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
got=3 expected=3
dmctl test cmd: "stop-task test"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 4-th time, will retry again
check diff successfully
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
[Mon Apr 29 14:31:39 CST 2024] <<<<<< finish DM-4216 optimistic >>>>>>
[Mon Apr 29 14:31:39 CST 2024] <<<<<< start DM-4216 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-pessimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
process dm-master.test already exit
got=2 expected=2
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:8915 alter table handle_error.tb1 add column c varchar(20);"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 5-th time, will retry again
check diff failed 1-th time, retry later
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:9494 alter table handle_error.tb1 add column c varchar(20);"
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:31:41 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
dmctl test cmd: "query-status test"
got=3 expected=3
[Mon Apr 29 14:31:41 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /tmp/dm_test/tls/dm-master1.toml >>>>>>
[Mon Apr 29 14:31:41 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /tmp/dm_test/tls/dm-master2.toml >>>>>>
[Mon Apr 29 14:31:41 CST 2024] <<<<<< START DM-MASTER on port 8461, config: /tmp/dm_test/tls/dm-master3.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait process dm-worker.test exit...
dmctl test cmd: "stop-task test"
[Mon Apr 29 14:31:42 CST 2024] <<<<<< finish DM-4216 pessimistic >>>>>>
[Mon Apr 29 14:31:42 CST 2024] <<<<<< start DM-4216 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-optimistic.yaml --remove-meta"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/relay_interrupt/1/source1.yaml"
query status, relay log failed
dmctl test cmd: "query-status -s mysql-replica-01"
got=1 expected=1
got=1 expected=1
start task and query status, task and relay have error messages
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-task.yaml"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 6-th time, will retry again
wait process dm-worker.test exit...
check diff successfully
dmctl test cmd: "stop-task test"
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:8261 is alive
got=2 expected=2
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:10980 alter table handle_error.tb1 add column c varchar(20);"
rpc addr 127.0.0.1:8361 is alive
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "binlog replace test -b dm-it-2bdb2898-22c1-41e6-9383-02caaa7aca23-qtwz4-z3f03-bin|000001.000001:11467 alter table handle_error.tb1 add column c varchar(20);"
rpc addr 127.0.0.1:8461 is alive
[Mon Apr 29 14:31:44 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /tmp/dm_test/tls/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
failpoint=github.com/pingcap/tiflow/dm/pkg/conn/FetchTargetDoTablesFailed=return(1152)
dmctl test cmd: "query-status test"
got=3 expected=3
dmctl test cmd: "stop-task test"
waiting for asynchronous relay and subtask to be started
[Mon Apr 29 14:31:44 CST 2024] <<<<<< finish DM-4216 optimistic >>>>>>
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/tls/source1.yaml"
[Mon Apr 29 14:31:45 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 7-th time, will retry again
[Mon Apr 29 14:31:45 CST 2024] <<<<<< finish DM-139 pessimistic >>>>>>
[Mon Apr 29 14:31:45 CST 2024] <<<<<< start DM-139 optimistic >>>>>>
[Mon Apr 29 14:31:45 CST 2024] <<<<<< start DM-4219 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-pessimistic.yaml --remove-meta"
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
dmctl test cmd: "query-status -s mysql-replica-01"
dmctl test cmd: "query-status -s mysql-replica-01"
dmctl test cmd: "query-status"
check master alive
dmctl test cmd: "list-member"
reset go failpoints, and need to restart dm-worker
then resume task, task will recover successfully
start task and check stage
dmctl test cmd: "start-task /tmp/dm_test/tls/dm-task.yaml --remove-meta=true"
got=1 expected=1
got=1 expected=1
got=2 expected=2
got=2 expected=2
dmctl test cmd: "query-status test"
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:31:47 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
got=2 expected=2
dmctl test cmd: "binlog replace test -s mysql-replica-01 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
wait process dm-worker.test exit...
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 8-th time, will retry again
got=1 expected=1
dmctl test cmd: "binlog replace test -s mysql-replica-02 alter table handle_error.tb1 add column c varchar(20);"
run tidb sql failed 1-th time, retry later
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/start_task/0/source1.yaml"
wait process dm-worker.test exit...
process dm-worker.test already exit
[Mon Apr 29 14:31:48 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-worker1.toml >>>>>>
dmctl test cmd: "query-status test"
test http and api interface
Release Version: v7.5.1-20-g984cb0405
Git Commit Hash: 984cb040525092f05c913f87c7bb7c78e8f6f39e
Git Branch: HEAD
UTC Build Time: 2024-04-29 06:09:18
Go Version: go version go1.21.6 linux/amd64
Failpoint Build: true

rpc addr 127.0.0.1:8261 is alive
check that an inaccessible DM-worker exists
dmctl test cmd: "query-status -s 127.0.0.1:8888"
start task, which will fail
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-task.yaml"
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
{
  "result": true,
  "msg": "",
  "members": [
    {
      "leader": {
        "msg": "",
        "name": "master1",
        "addr": "127.0.0.1:8261"
      }
    },
    {
      "master": {
        "msg": "",
        "masters": [
          {
            "name": "master1",
            "memberID": "10068700568285020701",
            "alive": true,
            "peerURLs": [
              "https://127.0.0.1:8291"
            ],
            "clientURLs": [
              "https://127.0.0.1:8261"
            ]
          },
          {
            "name": "master2",
            "memberID": "518295880577331676",
            "alive": true,
            "peerURLs": [
              "https://127.0.0.1:8292"
            ],
            "clientURLs": [
              "https://127.0.0.1:8361"
            ]
          },
          {
            "name": "master3",
            "memberID": "6800138986734450252",
            "alive": true,
            "peerURLs": [
              "https://127.0.0.1:8293"
            ],
            "clientURLs": [
              "https://127.0.0.1:8461"
            ]
          }
        ]
      }
    },
    {
      "worker": {
        "msg": "",
        "workers": [
          {
            "name": "worker1",
            "addr": "127.0.0.1:8262",
            "stage": "bound",
            "source": "mysql-replica-01"
          }
        ]
      }
    }
  ]
}
rpc addr 127.0.0.1:8261 is alive
a request using a common name not in 'cert-allowed-cn' should not succeed
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
reset go failpoints, need to restart dm-worker, then start task again
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-optimistic.yaml --remove-meta"
got=0 expected=1
command: query-status test because schema conflict detected count: 0 != expected: 1, failed the 9-th time, will retry again
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-worker.test exit...
dmctl test cmd: "stop-task test"
[Mon Apr 29 14:31:50 CST 2024] <<<<<< finish DM-4219 pessimistic >>>>>>
[Mon Apr 29 14:31:50 CST 2024] <<<<<< start DM-4219 optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-optimistic.yaml --remove-meta"
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 3-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "query-status test"
wait process dm-master.test exit...
got=2 expected=2
dmctl test cmd: "binlog replace test -s mysql-replica-01 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "query-status test"
{
    "result": true,
    "msg": "",
    "sources": [
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-01",
                "worker": "worker1",
                "result": null,
                "relayStatus": null
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Paused",
                    "unit": "Sync",
                    "result": {
                        "isCanceled": false,
                        "errors": [
                            {
                                "ErrCode": 42501,
                                "ErrClass": "ha",
                                "ErrScope": "internal",
                                "ErrLevel": "high",
                                "Message": "startLocation: [position: (dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 42228), gtid-set: 9fa4654f-05f0-11ef-97ff-5e87ecb4fbc2:1-194], endLocation: [position: (dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 42353), gtid-set: 9fa4654f-05f0-11ef-97ff-5e87ecb4fbc2:1-195], origin SQL: [alter table shardddl1.tb1 add column b int after a]: fail to do etcd txn operation: txn commit failed",
                                "RawCause": "rpc error: code = Unavailable desc = error reading from server: EOF",
                                "Workaround": "Please check dm-master's node status and the network between this node and dm-master"
                            }
                        ],
                        "detail": null
                    },
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "12",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 42353)",
                        "masterBinlogGtid": "9fa4654f-05f0-11ef-97ff-5e87ecb4fbc2:1-195",
                        "syncerBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 42163)",
                        "syncerBinlogGtid": "9fa4654f-05f0-11ef-97ff-5e87ecb4fbc2:1-194",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "remote",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "12",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        },
        {
            "result": true,
            "msg": "",
            "sourceStatus": {
                "source": "mysql-replica-02",
                "worker": "worker2",
                "result": null,
                "relayStatus": {
                    "masterBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 39206)",
                    "masterBinlogGtid": "a016a324-05f0-11ef-be79-5e87ecb4fbc2:1-167",
                    "relaySubDir": "a016a324-05f0-11ef-be79-5e87ecb4fbc2.000001",
                    "relayBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 39206)",
                    "relayBinlogGtid": "a016a324-05f0-11ef-be79-5e87ecb4fbc2:1-167",
                    "relayCatchUpMaster": true,
                    "stage": "Running",
                    "result": null
                }
            },
            "subTaskStatus": [
                {
                    "name": "test",
                    "stage": "Running",
                    "unit": "Sync",
                    "result": null,
                    "unresolvedDDLLockID": "",
                    "sync": {
                        "totalEvents": "6",
                        "totalTps": "0",
                        "recentTps": "0",
                        "masterBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin.000001, 39206)",
                        "masterBinlogGtid": "a016a324-05f0-11ef-be79-5e87ecb4fbc2:1-167",
                        "syncerBinlog": "(dm-it-b575f754-28da-4502-974c-ebb03787a0d1-9r16f-jshh2-bin|000001.000001, 38926)",
                        "syncerBinlogGtid": "a016a324-05f0-11ef-be79-5e87ecb4fbc2:1-166",
                        "blockingDDLs": [
                        ],
                        "unresolvedGroups": [
                        ],
                        "synced": false,
                        "binlogType": "local",
                        "secondsBehindMaster": "0",
                        "blockDDLOwner": "",
                        "conflictMsg": "",
                        "totalRows": "6",
                        "totalRps": "0",
                        "recentRps": "0"
                    },
                    "validation": null
                }
            ]
        }
    ]
}
PASS
coverage: 4.0% of statements in github.com/pingcap/tiflow/dm/...
curl: (7) Failed connect to 127.0.0.1:8361; Connection refused
curl: (7) Failed connect to 127.0.0.1:8461; Connection refused
curl: (7) Failed connect to 127.0.0.1:8561; Connection refused
curl: (7) Failed connect to 127.0.0.1:8661; Connection refused
curl: (7) Failed connect to 127.0.0.1:8761; Connection refused
curl: (7) Failed connect to 127.0.0.1:8264; Connection refused
curl: (7) Failed connect to 127.0.0.1:18262; Connection refused
curl: (7) Failed connect to 127.0.0.1:18263; Connection refused
make: *** [dm_integration_test_in_group] Error 1
wait for rpc addr 127.0.0.1:8261 alive the 4-th time
got=1 expected=1
dmctl test cmd: "binlog replace test -s mysql-replica-02 alter table handle_error.tb1 add column c varchar(20);"
dmctl test cmd: "stop-task test"
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
Post stage
[Mon Apr 29 14:31:52 CST 2024] <<<<<< finish DM-4219 optimistic >>>>>>
[Pipeline] sh
wait process dm-master.test exit...
+ ls /tmp/dm_test
cov.shardddl1.dmctl.1714371718.688.out
cov.shardddl1.dmctl.1714371719.815.out
cov.shardddl1.dmctl.1714371724.1092.out
cov.shardddl1.dmctl.1714371725.1133.out
cov.shardddl1.dmctl.1714371750.4347.out
cov.shardddl1.dmctl.1714371754.4676.out
cov.shardddl1.dmctl.1714371755.4717.out
cov.shardddl1.dmctl.1714371786.5238.out
cov.shardddl1.dmctl.1714371790.5569.out
cov.shardddl1.dmctl.1714371791.5610.out
cov.shardddl1.dmctl.1714371808.6216.out
cov.shardddl1.dmctl.1714371811.6534.out
cov.shardddl1.dmctl.1714371813.6577.out
cov.shardddl1.dmctl.1714371813.6668.out
cov.shardddl1.dmctl.1714371813.6810.out
cov.shardddl1.dmctl.1714371814.6853.out
cov.shardddl1.dmctl.1714371817.6970.out
cov.shardddl1.dmctl.1714371817.7124.out
cov.shardddl1.dmctl.1714371818.7187.out
cov.shardddl1.dmctl.1714371819.7311.out
cov.shardddl1.dmctl.1714371819.7360.out
cov.shardddl1.dmctl.1714371819.7403.out
cov.shardddl1.dmctl.1714371819.7448.out
cov.shardddl1.dmctl.1714371821.7507.out
cov.shardddl1.dmctl.1714371827.7701.out
cov.shardddl1.dmctl.1714371830.7773.out
cov.shardddl1.dmctl.1714371830.7815.out
cov.shardddl1.dmctl.1714371830.7854.out
cov.shardddl1.dmctl.1714371830.8003.out
cov.shardddl1.dmctl.1714371832.8057.out
cov.shardddl1.dmctl.1714371834.8250.out
cov.shardddl1.dmctl.1714371835.8399.out
cov.shardddl1.dmctl.1714371836.8440.out
cov.shardddl1.dmctl.1714371836.8563.out
cov.shardddl1.dmctl.1714371836.8603.out
cov.shardddl1.dmctl.1714371837.8749.out
cov.shardddl1.dmctl.1714371838.8788.out
cov.shardddl1.dmctl.1714371838.8895.out
cov.shardddl1.dmctl.1714371842.9034.out
cov.shardddl1.dmctl.1714371842.9074.out
cov.shardddl1.dmctl.1714371842.9119.out
cov.shardddl1.dmctl.1714371847.9407.out
cov.shardddl1.dmctl.1714371847.9465.out
cov.shardddl1.dmctl.1714371849.9510.out
cov.shardddl1.dmctl.1714371853.9654.out
cov.shardddl1.dmctl.1714371854.9805.out
cov.shardddl1.dmctl.1714371855.9843.out
cov.shardddl1.dmctl.1714371859.9988.out
cov.shardddl1.dmctl.1714371894.10296.out
cov.shardddl1.dmctl.1714371894.10350.out
cov.shardddl1.dmctl.1714371895.10390.out
cov.shardddl1.dmctl.1714371898.10494.out
cov.shardddl1.dmctl.1714371898.10536.out
cov.shardddl1.dmctl.1714371904.10720.out
cov.shardddl1.dmctl.1714371904.10764.out
cov.shardddl1.dmctl.1714371905.10806.out
cov.shardddl1.dmctl.1714371905.10950.out
cov.shardddl1.dmctl.1714371906.10998.out
cov.shardddl1.dmctl.1714371909.11102.out
cov.shardddl1.dmctl.1714371909.11145.out
cov.shardddl1.dmctl.1714371915.11331.out
cov.shardddl1.dmctl.1714371915.11376.out
cov.shardddl1.dmctl.1714371916.11425.out
cov.shardddl1.dmctl.1714371916.11573.out
cov.shardddl1.dmctl.1714371917.11628.out
cov.shardddl1.dmctl.1714371927.12127.out
cov.shardddl1.dmctl.1714371927.12277.out
cov.shardddl1.dmctl.1714371928.12316.out
cov.shardddl1.dmctl.1714371931.12722.out
cov.shardddl1.dmctl.1714371932.12869.out
cov.shardddl1.dmctl.1714371933.12907.out
cov.shardddl1.dmctl.1714371936.13212.out
cov.shardddl1.dmctl.1714371936.13363.out
cov.shardddl1.dmctl.1714371937.13402.out
cov.shardddl1.dmctl.1714371940.13704.out
cov.shardddl1.dmctl.1714371944.14031.out
cov.shardddl1.dmctl.1714371945.14073.out
cov.shardddl1.dmctl.1714371947.14134.out
cov.shardddl1.dmctl.1714371947.14179.out
cov.shardddl1.dmctl.1714371951.14523.out
cov.shardddl1.dmctl.1714371952.14575.out
cov.shardddl1.dmctl.1714371952.14716.out
cov.shardddl1.dmctl.1714371956.15043.out
cov.shardddl1.dmctl.1714371957.15088.out
cov.shardddl1.dmctl.1714371957.15134.out
cov.shardddl1.dmctl.1714371957.15181.out
cov.shardddl1.dmctl.1714371959.15329.out
cov.shardddl1.dmctl.1714371960.15373.out
cov.shardddl1.dmctl.1714371960.15461.out
cov.shardddl1.dmctl.1714371962.15608.out
cov.shardddl1.dmctl.1714371963.15652.out
cov.shardddl1.dmctl.1714371969.15830.out
cov.shardddl1.dmctl.1714371971.15974.out
cov.shardddl1.dmctl.1714371972.16018.out
cov.shardddl1.dmctl.1714371974.16115.out
cov.shardddl1.dmctl.1714371975.16264.out
cov.shardddl1.dmctl.1714371977.16301.out
cov.shardddl1.dmctl.1714371977.16384.out
cov.shardddl1.master.out
cov.shardddl1.worker.8262.1714371717.out
cov.shardddl1.worker.8262.1714371723.out
cov.shardddl1.worker.8262.1714371753.out
cov.shardddl1.worker.8262.1714371788.out
cov.shardddl1.worker.8262.1714371810.out
cov.shardddl1.worker.8262.1714371943.out
cov.shardddl1.worker.8262.1714371950.out
cov.shardddl1.worker.8262.1714371955.out
cov.shardddl1.worker.8263.1714371718.out
cov.shardddl1.worker.8263.1714371723.out
cov.shardddl1.worker.8263.1714371753.out
cov.shardddl1.worker.8263.1714371788.out
cov.shardddl1.worker.8263.1714371810.out
cov.shardddl1.worker.8263.1714371943.out
cov.shardddl1.worker.8263.1714371950.out
cov.shardddl1.worker.8263.1714371955.out
cov.shardddl1_1.dmctl.1714371987.16878.out
cov.shardddl1_1.dmctl.1714371988.17006.out
cov.shardddl1_1.dmctl.1714371989.17098.out
cov.shardddl1_1.dmctl.1714371991.17145.out
cov.shardddl1_1.dmctl.1714371993.17499.out
cov.shardddl1_1.dmctl.1714371994.17644.out
cov.shardddl1_1.dmctl.1714371995.17702.out
cov.shardddl1_1.dmctl.1714371997.17831.out
cov.shardddl1_1.dmctl.1714371999.17979.out
cov.shardddl1_1.dmctl.1714372000.18027.out
cov.shardddl1_1.dmctl.1714372002.18142.out
cov.shardddl1_1.dmctl.1714372004.18288.out
cov.shardddl1_1.dmctl.1714372005.18333.out
cov.shardddl1_1.dmctl.1714372007.18463.out
cov.shardddl1_1.dmctl.1714372009.18613.out
cov.shardddl1_1.dmctl.1714372010.18664.out
cov.shardddl1_1.dmctl.1714372012.18783.out
cov.shardddl1_1.dmctl.1714372014.18926.out
cov.shardddl1_1.dmctl.1714372015.18967.out
cov.shardddl1_1.dmctl.1714372017.19109.out
cov.shardddl1_1.dmctl.1714372019.19255.out
cov.shardddl1_1.dmctl.1714372020.19296.out
cov.shardddl1_1.dmctl.1714372020.19372.out
cov.shardddl1_1.dmctl.1714372021.19526.out
cov.shardddl1_1.dmctl.1714372023.19571.out
cov.shardddl1_1.dmctl.1714372023.19648.out
cov.shardddl1_1.dmctl.1714372024.19796.out
cov.shardddl1_1.dmctl.1714372025.19837.out
cov.shardddl1_1.dmctl.1714372028.19987.out
cov.shardddl1_1.dmctl.1714372029.20140.out
cov.shardddl1_1.dmctl.1714372030.20180.out
cov.shardddl1_1.dmctl.1714372033.20274.out
cov.shardddl1_1.dmctl.1714372034.20420.out
cov.shardddl1_1.dmctl.1714372035.20461.out
cov.shardddl1_1.dmctl.1714372035.20517.out
cov.shardddl1_1.dmctl.1714372037.20659.out
cov.shardddl1_1.dmctl.1714372038.20698.out
cov.shardddl1_1.dmctl.1714372040.20761.out
cov.shardddl1_1.dmctl.1714372041.20908.out
cov.shardddl1_1.dmctl.1714372043.20944.out
cov.shardddl1_1.dmctl.1714372045.21003.out
cov.shardddl1_1.dmctl.1714372046.21148.out
cov.shardddl1_1.dmctl.1714372047.21184.out
cov.shardddl1_1.dmctl.1714372050.21245.out
cov.shardddl1_1.dmctl.1714372051.21384.out
cov.shardddl1_1.dmctl.1714372052.21422.out
cov.shardddl1_1.dmctl.1714372052.21468.out
cov.shardddl1_1.dmctl.1714372053.21606.out
cov.shardddl1_1.dmctl.1714372054.21647.out
cov.shardddl1_1.dmctl.1714372054.21694.out
cov.shardddl1_1.dmctl.1714372055.21834.out
cov.shardddl1_1.dmctl.1714372057.21874.out
cov.shardddl1_1.dmctl.1714372059.21938.out
cov.shardddl1_1.dmctl.1714372060.22083.out
cov.shardddl1_1.dmctl.1714372061.22122.out
cov.shardddl1_1.dmctl.1714372063.22178.out
cov.shardddl1_1.dmctl.1714372065.22320.out
cov.shardddl1_1.dmctl.1714372066.22358.out
cov.shardddl1_1.dmctl.1714372068.22412.out
cov.shardddl1_1.dmctl.1714372070.22566.out
cov.shardddl1_1.dmctl.1714372071.22609.out
cov.shardddl1_1.dmctl.1714372071.22657.out
cov.shardddl1_1.dmctl.1714372071.22700.out
cov.shardddl1_1.dmctl.1714372073.22849.out
cov.shardddl1_1.dmctl.1714372074.22888.out
cov.shardddl1_1.dmctl.1714372074.22989.out
cov.shardddl1_1.dmctl.1714372076.23133.out
cov.shardddl1_1.dmctl.1714372077.23171.out
cov.shardddl1_1.dmctl.1714372077.23237.out
cov.shardddl1_1.dmctl.1714372077.23272.out
cov.shardddl1_1.dmctl.1714372079.23418.out
cov.shardddl1_1.dmctl.1714372080.23455.out
cov.shardddl1_1.dmctl.1714372080.23496.out
cov.shardddl1_1.dmctl.1714372080.23533.out
cov.shardddl1_1.dmctl.1714372081.23686.out
cov.shardddl1_1.dmctl.1714372083.23723.out
cov.shardddl1_1.dmctl.1714372085.23845.out
cov.shardddl1_1.dmctl.1714372086.23988.out
cov.shardddl1_1.dmctl.1714372087.24019.out
cov.shardddl1_1.dmctl.1714372087.24122.out
cov.shardddl1_1.dmctl.1714372089.24267.out
cov.shardddl1_1.dmctl.1714372090.24304.out
cov.shardddl1_1.dmctl.1714372091.24371.out
cov.shardddl1_1.dmctl.1714372091.24522.out
cov.shardddl1_1.dmctl.1714372093.24569.out
cov.shardddl1_1.dmctl.1714372093.24629.out
cov.shardddl1_1.dmctl.1714372093.24673.out
cov.shardddl1_1.dmctl.1714372094.24826.out
cov.shardddl1_1.dmctl.1714372096.24864.out
cov.shardddl1_1.dmctl.1714372096.24991.out
cov.shardddl1_1.dmctl.1714372097.25141.out
cov.shardddl1_1.dmctl.1714372099.25179.out
cov.shardddl1_1.dmctl.1714372101.25323.out
cov.shardddl1_1.dmctl.1714372103.25471.out
cov.shardddl1_1.dmctl.1714372104.25510.out
cov.shardddl1_1.dmctl.1714372105.25584.out
cov.shardddl1_1.dmctl.1714372105.25622.out
cov.shardddl1_1.dmctl.1714372107.25772.out
cov.shardddl1_1.dmctl.1714372108.25810.out
cov.shardddl1_1.dmctl.1714372110.25883.out
cov.shardddl1_1.dmctl.1714372110.25926.out
cov.shardddl1_1.dmctl.1714372112.26079.out
cov.shardddl1_1.dmctl.1714372113.26115.out
cov.shardddl1_1.dmctl.1714372115.26226.out
cov.shardddl1_1.master.out
cov.shardddl1_1.worker.8262.1714371986.out
cov.shardddl1_1.worker.8263.1714371987.out
cov.shardddl2.dmctl.1714372124.26668.out
cov.shardddl2.dmctl.1714372125.26794.out
cov.shardddl2.dmctl.1714372126.26884.out
cov.shardddl2.dmctl.1714372128.26962.out
cov.shardddl2.dmctl.1714372133.27170.out
cov.shardddl2.dmctl.1714372140.27378.out
cov.shardddl2.dmctl.1714372143.27562.out
cov.shardddl2.dmctl.1714372145.27668.out
cov.shardddl2.dmctl.1714372146.27807.out
cov.shardddl2.dmctl.1714372147.27876.out
cov.shardddl2.dmctl.1714372157.28149.out
cov.shardddl2.dmctl.1714372157.28218.out
cov.shardddl2.dmctl.1714372157.28361.out
cov.shardddl2.dmctl.1714372159.28418.out
cov.shardddl2.dmctl.1714372164.28607.out
cov.shardddl2.dmctl.1714372171.28799.out
cov.shardddl2.dmctl.1714372174.28965.out
cov.shardddl2.dmctl.1714372179.29085.out
cov.shardddl2.dmctl.1714372179.29229.out
cov.shardddl2.dmctl.1714372180.29299.out
cov.shardddl2.dmctl.1714372198.29756.out
cov.shardddl2.dmctl.1714372198.29794.out
cov.shardddl2.dmctl.1714372199.29940.out
cov.shardddl2.dmctl.1714372200.30007.out
cov.shardddl2.dmctl.1714372202.30111.out
cov.shardddl2.dmctl.1714372203.30170.out
cov.shardddl2.dmctl.1714372203.30220.out
cov.shardddl2.dmctl.1714372209.30382.out
cov.shardddl2.dmctl.1714372209.30426.out
cov.shardddl2.dmctl.1714372209.30466.out
cov.shardddl2.dmctl.1714372210.30566.out
cov.shardddl2.dmctl.1714372210.30712.out
cov.shardddl2.dmctl.1714372211.30754.out
cov.shardddl2.dmctl.1714372220.31021.out
cov.shardddl2.dmctl.1714372220.31074.out
cov.shardddl2.dmctl.1714372220.31118.out
cov.shardddl2.dmctl.1714372227.31274.out
cov.shardddl2.dmctl.1714372227.31320.out
cov.shardddl2.dmctl.1714372227.31363.out
cov.shardddl2.dmctl.1714372227.31461.out
cov.shardddl2.dmctl.1714372228.31601.out
cov.shardddl2.dmctl.1714372229.31641.out
cov.shardddl2.dmctl.1714372236.31889.out
cov.shardddl2.dmctl.1714372236.31945.out
cov.shardddl2.dmctl.1714372237.31993.out
cov.shardddl2.dmctl.1714372243.32152.out
cov.shardddl2.dmctl.1714372243.32198.out
cov.shardddl2.dmctl.1714372243.32240.out
cov.shardddl2.dmctl.1714372243.32330.out
cov.shardddl2.dmctl.1714372244.32474.out
cov.shardddl2.dmctl.1714372245.32512.out
cov.shardddl2.dmctl.1714372247.32626.out
cov.shardddl2.dmctl.1714372254.32828.out
cov.shardddl2.dmctl.1714372254.32877.out
cov.shardddl2.dmctl.1714372261.33041.out
cov.shardddl2.dmctl.1714372261.33090.out
cov.shardddl2.dmctl.1714372261.33133.out
cov.shardddl2.dmctl.1714372261.33243.out
cov.shardddl2.dmctl.1714372261.33391.out
cov.shardddl2.dmctl.1714372263.33438.out
cov.shardddl2.dmctl.1714372265.33549.out
cov.shardddl2.dmctl.1714372272.33752.out
cov.shardddl2.dmctl.1714372272.33799.out
cov.shardddl2.dmctl.1714372278.33959.out
cov.shardddl2.dmctl.1714372278.34006.out
cov.shardddl2.dmctl.1714372278.34044.out
cov.shardddl2.dmctl.1714372279.34138.out
cov.shardddl2.dmctl.1714372279.34284.out
cov.shardddl2.dmctl.1714372280.34322.out
cov.shardddl2.dmctl.1714372283.34429.out
cov.shardddl2.dmctl.1714372283.34481.out
cov.shardddl2.dmctl.1714372290.34676.out
cov.shardddl2.master.out
cov.shardddl2.worker.8262.1714372123.out
cov.shardddl2.worker.8262.1714372130.out
cov.shardddl2.worker.8263.1714372124.out
cov.shardddl2.worker.8263.1714372161.out
downstream
goroutines
shardddl1
shardddl1_1
shardddl2
sql_res.shardddl1.txt
sql_res.shardddl1_1.txt
sql_res.shardddl2.txt
tidb.toml
++ find /tmp/dm_test/ -type f -name '*.log'
+ tar -cvzf log-G07.tar.gz /tmp/dm_test/downstream/tidb/log/tidb.log /tmp/dm_test/goroutines/stack/log/master-8361.log /tmp/dm_test/goroutines/stack/log/worker-8262.log /tmp/dm_test/goroutines/stack/log/worker-18263.log /tmp/dm_test/goroutines/stack/log/master-8561.log /tmp/dm_test/goroutines/stack/log/master-8761.log /tmp/dm_test/goroutines/stack/log/master-8261.log /tmp/dm_test/goroutines/stack/log/master-8461.log /tmp/dm_test/goroutines/stack/log/worker-8263.log /tmp/dm_test/goroutines/stack/log/master-8661.log /tmp/dm_test/goroutines/stack/log/worker-18262.log /tmp/dm_test/goroutines/stack/log/worker-8264.log /tmp/dm_test/shardddl1_1/dmctl.1714372002.log /tmp/dm_test/shardddl1_1/dmctl.1714372038.log /tmp/dm_test/shardddl1_1/dmctl.1714372009.log /tmp/dm_test/shardddl1_1/dmctl.1714372014.log /tmp/dm_test/shardddl1_1/worker1/log/stdout.log /tmp/dm_test/shardddl1_1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1714372043.log /tmp/dm_test/shardddl1_1/dmctl.1714372033.log /tmp/dm_test/shardddl1_1/dmctl.1714372066.log /tmp/dm_test/shardddl1_1/dmctl.1714372112.log /tmp/dm_test/shardddl1_1/dmctl.1714371999.log /tmp/dm_test/shardddl1_1/dmctl.1714372000.log /tmp/dm_test/shardddl1_1/dmctl.1714372091.log /tmp/dm_test/shardddl1_1/dmctl.1714372041.log /tmp/dm_test/shardddl1_1/dmctl.1714372047.log /tmp/dm_test/shardddl1_1/dmctl.1714372037.log /tmp/dm_test/shardddl1_1/dmctl.1714372046.log /tmp/dm_test/shardddl1_1/dmctl.1714371997.log /tmp/dm_test/shardddl1_1/dmctl.1714372021.log /tmp/dm_test/shardddl1_1/dmctl.1714372105.log /tmp/dm_test/shardddl1_1/dmctl.1714371993.log /tmp/dm_test/shardddl1_1/dmctl.1714372028.log /tmp/dm_test/shardddl1_1/dmctl.1714372074.log /tmp/dm_test/shardddl1_1/dmctl.1714372012.log /tmp/dm_test/shardddl1_1/master/log/stdout.log /tmp/dm_test/shardddl1_1/master/log/dm-master.log /tmp/dm_test/shardddl1_1/dmctl.1714372080.log /tmp/dm_test/shardddl1_1/dmctl.1714372107.log /tmp/dm_test/shardddl1_1/dmctl.1714371994.log /tmp/dm_test/shardddl1_1/dmctl.1714372057.log /tmp/dm_test/shardddl1_1/dmctl.1714372010.log /tmp/dm_test/shardddl1_1/dmctl.1714372071.log /tmp/dm_test/shardddl1_1/dmctl.1714372005.log /tmp/dm_test/shardddl1_1/dmctl.1714372040.log /tmp/dm_test/shardddl1_1/dmctl.1714372113.log /tmp/dm_test/shardddl1_1/dmctl.1714372090.log /tmp/dm_test/shardddl1_1/dmctl.1714371991.log /tmp/dm_test/shardddl1_1/dmctl.1714372081.log /tmp/dm_test/shardddl1_1/dmctl.1714372076.log /tmp/dm_test/shardddl1_1/dmctl.1714372061.log /tmp/dm_test/shardddl1_1/dmctl.1714372035.log /tmp/dm_test/shardddl1_1/dmctl.1714372034.log /tmp/dm_test/shardddl1_1/dmctl.1714372097.log /tmp/dm_test/shardddl1_1/dmctl.1714372017.log /tmp/dm_test/shardddl1_1/dmctl.1714372086.log /tmp/dm_test/shardddl1_1/dmctl.1714372068.log /tmp/dm_test/shardddl1_1/dmctl.1714372030.log /tmp/dm_test/shardddl1_1/dmctl.1714372065.log /tmp/dm_test/shardddl1_1/dmctl.1714372101.log /tmp/dm_test/shardddl1_1/dmctl.1714372085.log /tmp/dm_test/shardddl1_1/dmctl.1714372015.log /tmp/dm_test/shardddl1_1/dmctl.1714372083.log /tmp/dm_test/shardddl1_1/dmctl.1714371987.log /tmp/dm_test/shardddl1_1/dmctl.1714372059.log /tmp/dm_test/shardddl1_1/dmctl.1714372104.log /tmp/dm_test/shardddl1_1/dmctl.1714372004.log /tmp/dm_test/shardddl1_1/dmctl.1714372024.log /tmp/dm_test/shardddl1_1/dmctl.1714372110.log /tmp/dm_test/shardddl1_1/dmctl.1714372025.log /tmp/dm_test/shardddl1_1/dmctl.1714372108.log /tmp/dm_test/shardddl1_1/dmctl.1714372052.log /tmp/dm_test/shardddl1_1/dmctl.1714372063.log /tmp/dm_test/shardddl1_1/dmctl.1714371995.log 
/tmp/dm_test/shardddl1_1/dmctl.1714372073.log /tmp/dm_test/shardddl1_1/dmctl.1714372054.log /tmp/dm_test/shardddl1_1/dmctl.1714372023.log /tmp/dm_test/shardddl1_1/dmctl.1714372089.log /tmp/dm_test/shardddl1_1/dmctl.1714371988.log /tmp/dm_test/shardddl1_1/dmctl.1714372093.log /tmp/dm_test/shardddl1_1/dmctl.1714372053.log /tmp/dm_test/shardddl1_1/dmctl.1714372094.log /tmp/dm_test/shardddl1_1/worker2/log/stdout.log /tmp/dm_test/shardddl1_1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1_1/dmctl.1714372020.log /tmp/dm_test/shardddl1_1/dmctl.1714372087.log /tmp/dm_test/shardddl1_1/dmctl.1714372045.log /tmp/dm_test/shardddl1_1/dmctl.1714372007.log /tmp/dm_test/shardddl1_1/dmctl.1714372070.log /tmp/dm_test/shardddl1_1/dmctl.1714372079.log /tmp/dm_test/shardddl1_1/dmctl.1714372019.log /tmp/dm_test/shardddl1_1/dmctl.1714372115.log /tmp/dm_test/shardddl1_1/dmctl.1714372096.log /tmp/dm_test/shardddl1_1/dmctl.1714372055.log /tmp/dm_test/shardddl1_1/dmctl.1714372051.log /tmp/dm_test/shardddl1_1/dmctl.1714372099.log /tmp/dm_test/shardddl1_1/dmctl.1714372029.log /tmp/dm_test/shardddl1_1/sync_diff_stdout.log /tmp/dm_test/shardddl1_1/dmctl.1714372077.log /tmp/dm_test/shardddl1_1/dmctl.1714371989.log /tmp/dm_test/shardddl1_1/dmctl.1714372060.log /tmp/dm_test/shardddl1_1/dmctl.1714372050.log /tmp/dm_test/shardddl1_1/dmctl.1714372103.log /tmp/dm_test/shardddl1/dmctl.1714371814.log /tmp/dm_test/shardddl1/worker1/log/stdout.log /tmp/dm_test/shardddl1/worker1/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1714371790.log /tmp/dm_test/shardddl1/dmctl.1714371718.log /tmp/dm_test/shardddl1/dmctl.1714371915.log /tmp/dm_test/shardddl1/dmctl.1714371909.log /tmp/dm_test/shardddl1/dmctl.1714371837.log /tmp/dm_test/shardddl1/dmctl.1714371937.log /tmp/dm_test/shardddl1/dmctl.1714371977.log /tmp/dm_test/shardddl1/dmctl.1714371849.log /tmp/dm_test/shardddl1/dmctl.1714371819.log /tmp/dm_test/shardddl1/master/log/stdout.log /tmp/dm_test/shardddl1/master/log/dm-master.log /tmp/dm_test/shardddl1/dmctl.1714371974.log /tmp/dm_test/shardddl1/dmctl.1714371834.log /tmp/dm_test/shardddl1/dmctl.1714371952.log /tmp/dm_test/shardddl1/dmctl.1714371905.log /tmp/dm_test/shardddl1/dmctl.1714371971.log /tmp/dm_test/shardddl1/dmctl.1714371725.log /tmp/dm_test/shardddl1/dmctl.1714371754.log /tmp/dm_test/shardddl1/dmctl.1714371917.log /tmp/dm_test/shardddl1/dmctl.1714371859.log /tmp/dm_test/shardddl1/dmctl.1714371960.log /tmp/dm_test/shardddl1/dmctl.1714371719.log /tmp/dm_test/shardddl1/dmctl.1714371895.log /tmp/dm_test/shardddl1/dmctl.1714371972.log /tmp/dm_test/shardddl1/dmctl.1714371821.log /tmp/dm_test/shardddl1/dmctl.1714371969.log /tmp/dm_test/shardddl1/dmctl.1714371916.log /tmp/dm_test/shardddl1/dmctl.1714371963.log /tmp/dm_test/shardddl1/dmctl.1714371818.log /tmp/dm_test/shardddl1/dmctl.1714371945.log /tmp/dm_test/shardddl1/dmctl.1714371959.log /tmp/dm_test/shardddl1/dmctl.1714371832.log /tmp/dm_test/shardddl1/dmctl.1714371808.log /tmp/dm_test/shardddl1/dmctl.1714371827.log /tmp/dm_test/shardddl1/dmctl.1714371928.log /tmp/dm_test/shardddl1/dmctl.1714371931.log /tmp/dm_test/shardddl1/dmctl.1714371904.log /tmp/dm_test/shardddl1/dmctl.1714371894.log /tmp/dm_test/shardddl1/dmctl.1714371853.log /tmp/dm_test/shardddl1/dmctl.1714371811.log /tmp/dm_test/shardddl1/dmctl.1714371755.log /tmp/dm_test/shardddl1/dmctl.1714371944.log /tmp/dm_test/shardddl1/dmctl.1714371813.log /tmp/dm_test/shardddl1/dmctl.1714371724.log /tmp/dm_test/shardddl1/dmctl.1714371847.log /tmp/dm_test/shardddl1/dmctl.1714371791.log /tmp/dm_test/shardddl1/dmctl.1714371898.log 
/tmp/dm_test/shardddl1/dmctl.1714371836.log /tmp/dm_test/shardddl1/worker2/log/stdout.log /tmp/dm_test/shardddl1/worker2/log/dm-worker.log /tmp/dm_test/shardddl1/dmctl.1714371947.log /tmp/dm_test/shardddl1/dmctl.1714371957.log /tmp/dm_test/shardddl1/dmctl.1714371750.log /tmp/dm_test/shardddl1/dmctl.1714371906.log /tmp/dm_test/shardddl1/dmctl.1714371835.log /tmp/dm_test/shardddl1/dmctl.1714371975.log /tmp/dm_test/shardddl1/dmctl.1714371933.log /tmp/dm_test/shardddl1/dmctl.1714371842.log /tmp/dm_test/shardddl1/dmctl.1714371940.log /tmp/dm_test/shardddl1/dmctl.1714371817.log /tmp/dm_test/shardddl1/dmctl.1714371830.log /tmp/dm_test/shardddl1/dmctl.1714371855.log /tmp/dm_test/shardddl1/dmctl.1714371936.log /tmp/dm_test/shardddl1/dmctl.1714371786.log /tmp/dm_test/shardddl1/sync_diff_stdout.log /tmp/dm_test/shardddl1/dmctl.1714371962.log /tmp/dm_test/shardddl1/dmctl.1714371932.log /tmp/dm_test/shardddl1/dmctl.1714371951.log /tmp/dm_test/shardddl1/dmctl.1714371927.log /tmp/dm_test/shardddl1/dmctl.1714371956.log /tmp/dm_test/shardddl1/dmctl.1714371838.log /tmp/dm_test/shardddl1/dmctl.1714371854.log /tmp/dm_test/shardddl2/dmctl.1714372145.log /tmp/dm_test/shardddl2/dmctl.1714372174.log /tmp/dm_test/shardddl2/worker1/log/stdout.log /tmp/dm_test/shardddl2/worker1/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1714372210.log /tmp/dm_test/shardddl2/dmctl.1714372290.log /tmp/dm_test/shardddl2/dmctl.1714372236.log /tmp/dm_test/shardddl2/dmctl.1714372203.log /tmp/dm_test/shardddl2/master/log/stdout.log /tmp/dm_test/shardddl2/master/log/dm-master.log /tmp/dm_test/shardddl2/dmctl.1714372140.log /tmp/dm_test/shardddl2/dmctl.1714372202.log /tmp/dm_test/shardddl2/dmctl.1714372124.log /tmp/dm_test/shardddl2/dmctl.1714372179.log /tmp/dm_test/shardddl2/dmctl.1714372237.log /tmp/dm_test/shardddl2/dmctl.1714372133.log /tmp/dm_test/shardddl2/dmctl.1714372209.log /tmp/dm_test/shardddl2/dmctl.1714372126.log /tmp/dm_test/shardddl2/dmctl.1714372229.log /tmp/dm_test/shardddl2/dmctl.1714372125.log /tmp/dm_test/shardddl2/dmctl.1714372244.log /tmp/dm_test/shardddl2/dmctl.1714372278.log /tmp/dm_test/shardddl2/dmctl.1714372247.log /tmp/dm_test/shardddl2/dmctl.1714372245.log /tmp/dm_test/shardddl2/dmctl.1714372272.log /tmp/dm_test/shardddl2/dmctl.1714372265.log /tmp/dm_test/shardddl2/dmctl.1714372199.log /tmp/dm_test/shardddl2/dmctl.1714372211.log /tmp/dm_test/shardddl2/dmctl.1714372171.log /tmp/dm_test/shardddl2/dmctl.1714372227.log /tmp/dm_test/shardddl2/dmctl.1714372263.log /tmp/dm_test/shardddl2/dmctl.1714372220.log /tmp/dm_test/shardddl2/dmctl.1714372143.log /tmp/dm_test/shardddl2/dmctl.1714372261.log /tmp/dm_test/shardddl2/dmctl.1714372164.log /tmp/dm_test/shardddl2/dmctl.1714372243.log /tmp/dm_test/shardddl2/worker2/log/stdout.log /tmp/dm_test/shardddl2/worker2/log/dm-worker.log /tmp/dm_test/shardddl2/dmctl.1714372146.log /tmp/dm_test/shardddl2/dmctl.1714372279.log /tmp/dm_test/shardddl2/dmctl.1714372200.log /tmp/dm_test/shardddl2/dmctl.1714372159.log /tmp/dm_test/shardddl2/sync_diff_stdout.log /tmp/dm_test/shardddl2/dmctl.1714372147.log /tmp/dm_test/shardddl2/dmctl.1714372128.log /tmp/dm_test/shardddl2/dmctl.1714372280.log /tmp/dm_test/shardddl2/dmctl.1714372198.log /tmp/dm_test/shardddl2/dmctl.1714372228.log /tmp/dm_test/shardddl2/dmctl.1714372254.log /tmp/dm_test/shardddl2/dmctl.1714372180.log /tmp/dm_test/shardddl2/dmctl.1714372283.log /tmp/dm_test/shardddl2/dmctl.1714372157.log
tar: Removing leading `/' from member names
+ ls -alh log-G07.tar.gz
-rw-r--r--. 1 jenkins jenkins 666K Apr 29 14:31 log-G07.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
wait for rpc addr 127.0.0.1:8261 alive the 5-th time
[Pipeline] }
wait process dm-master.test exit...
process dm-master.test already exit
[Mon Apr 29 14:31:52 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
[Pipeline] // withCredentials
[Pipeline] }
[Mon Apr 29 14:31:53 CST 2024] <<<<<< start DM-4220 pessimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/handle_error_2/conf/double-source-pessimistic.yaml --remove-meta"
[Pipeline] // timeout
[Pipeline] }
wait process dm-master.test exit...
[Pipeline] // stage
[Pipeline] }
wait for rpc addr 127.0.0.1:8261 alive the 6-th time
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
dmctl test cmd: "query-status test"
Sending interrupt signal to process
Killing processes
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 7-th time
got=2 expected=2
dmctl test cmd: "query-status test"
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
got=2 expected=2
dmctl test cmd: "binlog revert test"
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
wait process dm-master.test exit...
dmctl test cmd: "query-status test"
got=2 expected=2
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:31:55 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 8-th time
rpc addr 127.0.0.1:8262 is alive
wait for rpc addr 127.0.0.1:8261 alive the 9-th time
start task after restarting dm-worker
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/relay_interrupt/conf/dm-task.yaml"
dmctl test cmd: "query-status test"
got=1 expected=1
check diff successfully
reading binlog from relay log failed, and will use remote binlog
wait for rpc addr 127.0.0.1:8261 alive the 10-th time
check diff successfully
dmctl test cmd: "stop-task test"
wait for rpc addr 127.0.0.1:8261 alive the 11-th time
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
wait process dm-worker.test exit...
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
wait for rpc addr 127.0.0.1:8261 alive the 12-th time
wait for rpc addr 127.0.0.1:8261 alive the 13-th time
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-task.yaml "
wait for rpc addr 127.0.0.1:8261 alive the 14-th time
check diff successfully
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait for rpc addr 127.0.0.1:8261 alive the 15-th time
wait process dm-master.test exit...
wait for rpc addr 127.0.0.1:8261 alive the 16-th time
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
[Mon Apr 29 14:32:05 CST 2024] <<<<<< finish DM-139 optimistic >>>>>>
[Mon Apr 29 14:32:06 CST 2024] <<<<<< start DM-142 pessimistic >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 17-th time
wait process dm-worker.test exit...
check diff successfully
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/async_checkpoint_flush/run.sh: line 29:  2093 Terminated              insert_data
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait for rpc addr 127.0.0.1:8261 alive the 18-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
failpoint=github.com/pingcap/tiflow/dm/pkg/conn/FetchAllDoTablesFailed=return(1152)
wait process dm-worker.test exit...
[Mon Apr 29 14:32:08 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 19-th time
wait process dm-worker.test exit...
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:32:09 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 20-th time
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon Apr 29 14:32:10 CST 2024] <<<<<< test case async_checkpoint_flush success! >>>>>>
start running case: [binlog_parse] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/binlog_parse/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/binlog_parse/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Mon Apr 29 14:32:10 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/binlog_parse/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 check alive timeout
check data
check diff successfully
============================== test_master_ha_when_enable_tidb_and_only_ca_source_tls success ==================================
3 dm-master alive
1 dm-worker alive
0 dm-syncer alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl4_1/conf/double-source-pessimistic.yaml --remove-meta"
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/start_task/1/source1.yaml"
check that the un-accessible DM-worker exists
dmctl test cmd: "query-status -s 127.0.0.1:8888"
start task, which will fail
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/conf/dm-task.yaml"
reset go failpoints, restart dm-worker, then start the task again
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
wait process dm-master.test exit...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
dmctl test cmd: "query-status test"
got=2 expected=2
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
rpc addr 127.0.0.1:8261 is alive
[Mon Apr 29 14:32:12 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/binlog_parse/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
wait process dm-worker.test exit...
make: *** [dm_integration_test_in_group] Terminated
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/start_task/../_utils/test_prepare: line 232:  5757 Terminated              wait_process_exit $keyword
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
script returned exit code 143
wait process dm-master.test exit...
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/binlog_parse/source1.yaml"
prepare data
start task
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/binlog_parse/conf/dm-task.yaml --remove-meta"
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
kill finished with exit code 0
make: *** [dm_integration_test_in_group] Terminated
script returned exit code 143
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G00'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G02'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G03'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G06'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G10'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'TLS_GROUP'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 2
Finished: FAILURE