Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm Agent pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-gw343-ttj1h --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "3fd4f18a17a8d7c5dcedb1b33f27894419d458e8" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-gw343" name: "pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > 
git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 0 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=bbd43f0d-3764-4728-b3a0-9fa11e8449d3 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 
JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-gw343 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-ff467ea3dec646dcb34a434b6d5ddeaf65f47c579492826166e1422a0f0cf5b0 NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-gw343 pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.6' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build4016370714=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-test-1781-gw343-ttj1h-6f7wm bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 2085 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: e25983da9d4aed91 (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 
25 IN A 20.205.243.166 ;; Query time: 0 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Mon May 06 06:19:56 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10953/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10953/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10953/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10953/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/processor/sinkmanager/manager.go, cdc/processor/sinkmanager/manager_test.go, cdc/processor/sinkmanager/redo_log_worker.go, cdc/processor/sinkmanager/redo_log_worker_test.go, cdc/processor/sinkmanager/table_sink_worker.go, cdc/processor/sinkmanager/table_sink_worker_test.go, cdc/processor/sinkmanager/table_sink_wrapper.go, cdc/processor/sinkmanager/tasks.go, cdc/processor/sinkmanager/tasks_test.go, cdc/processor/sourcemanager/manager.go, cdc/processor/sourcemanager/sorter/engine.go, cdc/processor/sourcemanager/sorter/memory/event_sorter.go, cdc/processor/sourcemanager/sorter/memory/event_sorter_test.go, cdc/processor/sourcemanager/sorter/pebble/encoding/key.go, cdc/processor/sourcemanager/sorter/pebble/event_sorter.go, cdc/processor/sourcemanager/sorter/pebble/event_sorter_test.go] [Pipeline] echo diff file not matched: cdc/processor/sinkmanager/manager.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-be15534) 203635712 bytes in 1.06 secs (191291900 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/.git/ .git HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) POST git-upload-pack (656 bytes) POST git-upload-pack (973 bytes) From https://github.com/pingcap/tiflow = [up to date] master -> 
origin/master b0ad7c037..bd37097ad refs/pull/10953/head -> origin/pr/10953/head HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) 🚧 Checkouting to base SHA:be1553484fe4c03594eabb8d7435c694e5fd7224... HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) ✅ Checked. 🎉 🧾 HEAD info: be1553484fe4c03594eabb8d7435c694e5fd7224 be1553484 codec(ticdc): avro simplify the unit test (#11010) 2a7a65c6f Support Sequences (#10203) 36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005) 🚧 Pre-merge heads of pull requests to base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ... Updating be1553484..bd37097ad Fast-forward cdc/processor/sinkmanager/manager.go | 48 +++----- cdc/processor/sinkmanager/manager_test.go | 2 +- cdc/processor/sinkmanager/redo_log_worker.go | 11 +- cdc/processor/sinkmanager/redo_log_worker_test.go | 4 +- cdc/processor/sinkmanager/table_sink_worker.go | 14 ++- .../sinkmanager/table_sink_worker_test.go | 16 +-- cdc/processor/sinkmanager/table_sink_wrapper.go | 4 +- cdc/processor/sinkmanager/tasks.go | 14 +++ cdc/processor/sinkmanager/tasks_test.go | 15 +++ cdc/processor/sourcemanager/manager.go | 13 ++- cdc/processor/sourcemanager/sorter/engine.go | 18 ++- .../sourcemanager/sorter/memory/event_sorter.go | 4 +- .../sorter/memory/event_sorter_test.go | 3 +- .../sourcemanager/sorter/pebble/encoding/key.go | 129 +++++++++++++-------- .../sourcemanager/sorter/pebble/event_sorter.go | 20 +++- .../sorter/pebble/event_sorter_test.go | 6 +- 16 files changed, 210 insertions(+), 111 deletions(-) 🧾 Pre-merged result: bd37097adb9743a2e37a9b3c084c776608beee5d bd37097ad fix 432b2b31c Merge branch 'master' into delete-range-less be1553484 codec(ticdc): avro simplify the unit test (#11010) ✅ Pre merged 🎉 ✅ ~~~~~All done.~~~~~~ [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache not saved (git/pingcap/tiflow/rev-be15534-bd37097 already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh + cd ../tiflow + ./scripts/download-integration-test-binaries.sh master Download binaries...
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 999 0 --:--:-- --:--:-- --:--:-- 1025 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 2780 0 --:--:-- --:--:-- --:--:-- 2928 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1695 0 --:--:-- --:--:-- --:--:-- 1708 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 451 0 --:--:-- --:--:-- --:--:-- 455 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/06ee59bd9c683757f75fdd3469f37f50988a1a2f/centos7/tidb-server.tar.gz 2024-05-06 14:20:23 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/06ee59bd9c683757f75fdd3469f37f50988a1a2f/centos7/tidb-server.tar.gz [536606546/536606546] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz 2024-05-06 14:20:38 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz 2024-05-06 14:21:03 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e50de84e6d6ecdcc108990217b70b6bb3f50271/centos7/tiflash.tar.gz 2024-05-06 14:21:23 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e50de84e6d6ecdcc108990217b70b6bb3f50271/centos7/tiflash.tar.gz [456051121/456051121] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-05-06 14:21:28 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-05-06 14:21:29 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-05-06 14:21:29 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-05-06 14:21:30 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-05-06 14:21:35 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 
[79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-05-06 14:21:43 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x 6 jenkins jenkins 4.0K May 6 14:21 . drwxr-sr-x 19 jenkins jenkins 4.0K May 6 14:21 .. drwxr-sr-x 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x 1 jenkins jenkins 44M May 6 14:21 go-ycsb -rwxr-xr-x 1 jenkins jenkins 3.8M May 6 14:21 jq drwxr-sr-x 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx 1 jenkins jenkins 13 May 6 12:44 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx 1 jenkins jenkins 16 May 6 12:44 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx 1 jenkins jenkins 13 May 6 12:44 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx 1 jenkins jenkins 15 May 6 12:44 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 2.6M May 6 12:05 libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 272M May 6 12:45 libtiflash_proxy.so -rwxr-xr-x 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x 1 jenkins jenkins 208M May 6 11:44 tidb-server -rwxr-xr-x 1 jenkins jenkins 380M May 6 12:44 tiflash -rwxr-xr-x 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/bin/schema-registry-start + cd - /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib 
../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x 6 jenkins jenkins 4.0K May 6 14:21 . drwxr-sr-x 3 jenkins jenkins 4.0K May 6 14:21 .. drwxr-sr-x 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x 1 jenkins jenkins 44M May 6 14:21 go-ycsb -rwxr-xr-x 1 jenkins jenkins 3.8M May 6 14:21 jq drwxr-sr-x 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx 1 jenkins jenkins 13 May 6 12:44 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx 1 jenkins jenkins 16 May 6 12:44 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx 1 jenkins jenkins 13 May 6 12:44 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx 1 jenkins jenkins 15 May 6 12:44 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 2.6M May 6 12:05 libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 272M May 6 12:45 libtiflash_proxy.so -rwxr-xr-x 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x 1 jenkins jenkins 208M May 6 11:44 tidb-server -rwxr-xr-x 1 jenkins jenkins 380M May 6 12:44 tiflash -rwxr-xr-x 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community 
Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 8.0K drwxr-sr-x 2 jenkins jenkins 4.0K May 6 14:21 . drwxr-sr-x 19 jenkins jenkins 4.0K May 6 14:21 .. + '[' -f ./bin/cdc ']' + make cdc CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:21:49" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc ./cmd/cdc go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/coreos/go-semver v0.3.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/IBM/sarama v1.41.2 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading 
github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/DATA-DOG/go-sqlmock v1.5.0 go: downloading github.com/imdario/mergo v0.3.16 go: downloading github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 go: downloading github.com/swaggo/gin-swagger v1.2.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/google/uuid v1.6.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading go.uber.org/atomic v1.11.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/hashicorp/golang-lru v0.5.1 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/r3labs/diff v1.1.0 go: downloading github.com/YangKeao/seahash v0.0.0-20240229041150-e7bf269c3140 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/segmentio/kafka-go v0.4.41-0.20230526171612-f057b1d369cd go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/goccy/go-json v0.10.2 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading golang.org/x/sys v0.19.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/uber-go/atomic v1.4.0 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/swaggo/swag v1.16.3 go: downloading github.com/golang/mock v1.6.0 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: 
downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pingcap/tidb-dashboard v0.0.0-20240326110213-9768844ff5d7 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading gorm.io/gorm v1.24.5 go: downloading golang.org/x/term v0.19.0 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/xdg-go/scram v1.1.2 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/KyleBanks/depth v1.2.1 go: downloading github.com/go-openapi/spec v0.21.0 go: downloading golang.org/x/tools v0.20.0 go: downloading 
github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/joomcode/errorx v1.0.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/aws/aws-sdk-go-v2/config v1.18.30 go: downloading github.com/aws/aws-sdk-go-v2/credentials v1.13.29 go: downloading github.com/aws/aws-sdk-go-v2/service/glue v1.58.1 go: downloading github.com/jarcoal/httpmock v1.2.0 go: downloading github.com/mailru/easyjson v0.7.7 go: downloading github.com/glebarez/sqlite v1.7.0 go: downloading gorm.io/driver/mysql v1.3.3 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/blacktear23/go-proxyprotocol v1.0.6 go: downloading github.com/pingcap/fn v1.0.0 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2 go: downloading github.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 go: downloading github.com/shopspring/decimal v1.3.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/xdg-go/stringprep v1.0.4 go: downloading 
github.com/xdg-go/pbkdf2 v1.0.0 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/glebarez/go-sqlite v1.21.2 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/go-ozzo/ozzo-validation/v4 v4.3.0 go: downloading github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.6 go: downloading github.com/aws/aws-sdk-go-v2/internal/ini v1.3.37 go: downloading github.com/aws/aws-sdk-go-v2/service/sso v1.12.14 go: downloading github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.14 go: downloading github.com/aws/aws-sdk-go-v2/service/sts v1.20.1 go: downloading github.com/go-openapi/jsonreference v0.21.0 go: downloading github.com/go-openapi/jsonpointer v0.21.0 go: downloading github.com/go-openapi/swag v0.23.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.36 go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading modernc.org/libc v1.37.1 go: downloading modernc.org/sqlite v1.27.0 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.30 go: downloading github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.30 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser 
v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/josharian/intern v1.0.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading k8s.io/api v0.28.6 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading k8s.io/apimachinery v0.28.6 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading modernc.org/memory v1.7.2 go: downloading modernc.org/mathutil v1.6.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + '[' -f ./bin/cdc_kafka_consumer ']' + make kafka_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:24:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go + '[' -f ./bin/cdc_storage_consumer ']' + make storage_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:00" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X 
"github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go + '[' -f ./bin/cdc.test ']' + make integration_test_build cd tools/check && GO111MODULE=on go build -mod=mod -o ../bin/failpoint-ctl github.com/pingcap/failpoint/failpoint-ctl go: downloading github.com/pingcap/failpoint v0.0.0-20210316064728-7acb0f0a3dfd go: downloading github.com/sergi/go-diff v1.1.0 CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc_pulsar_consumer ./cmd/pulsar-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/oauth2-server ./cmd/oauth2-server/main.go go: downloading github.com/go-oauth2/oauth2/v4 v4.5.2 go: downloading github.com/tidwall/buntdb v1.3.0 go: downloading github.com/tidwall/rtred v0.1.2 go: downloading github.com/tidwall/match v1.1.1 go: downloading github.com/tidwall/grect v0.1.4 go: downloading github.com/tidwall/gjson v1.14.3 go: downloading github.com/tidwall/tinyqueue v0.1.1 go: downloading github.com/tidwall/pretty v1.2.0 $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl enable >/dev/null)
go: downloading github.com/PingCAP-QE/go-sqlsmith v0.0.0-20231213065948-336e064b488d
go: downloading github.com/chzyer/readline v1.5.1
go: downloading github.com/deepmap/oapi-codegen v1.9.0
go: downloading github.com/gogo/gateway v1.1.0
go: downloading github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510
go: downloading github.com/getkin/kin-openapi v0.80.0
go: downloading github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954
go: downloading go.uber.org/dig v1.13.0
go: downloading github.com/mattn/go-shellwords v1.0.12
go: downloading go.uber.org/ratelimit v0.2.0
go: downloading github.com/VividCortex/mysqlerr v1.0.0
go: downloading go.uber.org/goleak v1.3.0
go: downloading github.com/bradleyjkemp/grpc-tools v0.2.5
go: downloading github.com/integralist/go-findroot v0.0.0-20160518114804-ac90681525dc
go: downloading github.com/jmoiron/sqlx v1.3.3
go: downloading upper.io/db.v3 v3.7.1+incompatible
go: downloading github.com/DataDog/zstd v1.5.5
go: downloading github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0
go: downloading github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129
go: downloading github.com/ngaut/log v0.0.0-20210830112240-0124ec040aeb
go: downloading github.com/improbable-eng/grpc-web v0.12.0
go: downloading github.com/ghodss/yaml v1.0.0
go: downloading github.com/rs/cors v1.7.0
go: downloading github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f
CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -c -cover -covermode=atomic \
  -coverpkg=github.com/pingcap/tiflow/... \
  -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \
  || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; }
CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-28-gbd37097ad" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-06 06:25:10" -X "github.com/pingcap/tiflow/pkg/version.GitHash=bd37097adb9743a2e37a9b3c084c776608beee5d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.6 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-28-gbd37097ad"' -o bin/cdc ./cmd/cdc/main.go \
  || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; }
$(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null)
+ ls -alh ./bin
total 1.2G
drwxr-sr-x 2 jenkins jenkins 4.0K May 6 14:30 .
drwxr-sr-x 19 jenkins jenkins 4.0K May 6 14:21 ..
-rwxr-xr-x 1 jenkins jenkins 220M May 6 14:30 cdc
-rwxr-xr-x 1 jenkins jenkins 363M May 6 14:30 cdc.test
-rwxr-xr-x 1 jenkins jenkins 183M May 6 14:25 cdc_kafka_consumer
-rwxr-xr-x 1 jenkins jenkins 183M May 6 14:25 cdc_pulsar_consumer
-rwxr-xr-x 1 jenkins jenkins 182M May 6 14:25 cdc_storage_consumer
-rwxr-xr-x 1 jenkins jenkins 12M May 6 14:25 oauth2-server
+ ./bin/cdc version
Release Version: v8.2.0-alpha-28-gbd37097ad
Git Commit Hash: bd37097adb9743a2e37a9b3c084c776608beee5d
Git Branch: HEAD
UTC Build Time: 2024-05-06 06:25:10
Go Version: go version go1.21.6 linux/amd64
Failpoint Build: true
[Pipeline] }
Cache not saved (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-bd37097 already exists)
[Pipeline] // cache
[Pipeline] cache
Cache not restored (no such key found)
[Pipeline] {
[Pipeline] sh
+ cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/
+ ls -alh ./bin
total 3.0G
drwxr-sr-x 6 jenkins jenkins 4.0K May 6 14:30 .
drwxr-sr-x 19 jenkins jenkins 4.0K May 6 14:21 ..
drwxr-sr-x 2 jenkins jenkins 4.0K May 6 14:30 bin
-rwxr-xr-x 1 jenkins jenkins 220M May 6 14:30 cdc
-rwxr-xr-x 1 jenkins jenkins 363M May 6 14:30 cdc.test
-rwxr-xr-x 1 jenkins jenkins 183M May 6 14:25 cdc_kafka_consumer
-rwxr-xr-x 1 jenkins jenkins 183M May 6 14:25 cdc_pulsar_consumer
-rwxr-xr-x 1 jenkins jenkins 182M May 6 14:25 cdc_storage_consumer
drwxr-sr-x 4 jenkins jenkins 4.0K May 6 14:30 etc
-rwxr-xr-x 1 jenkins jenkins 17M May 6 14:30 etcdctl
-rwxr-xr-x 1 jenkins jenkins 44M May 6 14:30 go-ycsb
-rwxr-xr-x 1 jenkins jenkins 3.8M May 6 14:30 jq
drwxr-sr-x 3 jenkins jenkins 4.0K May 6 14:30 lib
lrwxrwxrwx 1 jenkins jenkins 13 May 6 14:30 libc++.so.1 -> libc++.so.1.0
-rwxr-xr-x 1 jenkins jenkins 1016K May 6 14:30 libc++.so.1.0
lrwxrwxrwx 1 jenkins jenkins 16 May 6 14:30 libc++abi.so.1 -> libc++abi.so.1.0
-rwxr-xr-x 1 jenkins jenkins 358K May 6 14:30 libc++abi.so.1.0
lrwxrwxrwx 1 jenkins jenkins 13 May 6 14:30 libgmssl.so -> libgmssl.so.3
lrwxrwxrwx 1 jenkins jenkins 15 May 6 14:30 libgmssl.so.3 -> libgmssl.so.3.0
-rwxr-xr-x 1 jenkins jenkins 2.6M May 6 14:30 libgmssl.so.3.0
-rwxr-xr-x 1 jenkins jenkins 272M May 6 14:30 libtiflash_proxy.so
-rwxr-xr-x 1 jenkins jenkins 50M May 6 14:30 minio
-rwxr-xr-x 1 jenkins jenkins 12M May 6 14:25 oauth2-server
-rwxr-xr-x 1 jenkins jenkins 37M May 6 14:30 pd-api-bench
-rwxr-xr-x 1 jenkins jenkins 44M May 6 14:30 pd-ctl
-rwxr-xr-x 1 jenkins jenkins 36M May 6 14:30 pd-heartbeat-bench
-rwxr-xr-x 1 jenkins jenkins 32M May 6 14:30 pd-recover
-rwxr-xr-x 1 jenkins jenkins 106M May 6 14:30 pd-server
-rwxr-xr-x 1 jenkins jenkins 26M May 6 14:30 pd-tso-bench
-rwxr-xr-x 1 jenkins jenkins 3.0M May 6 14:30 pd-ut
-rwxr-xr-x 1 jenkins jenkins 32M May 6 14:30 regions-dump
drwxr-sr-x 4 jenkins jenkins 4.0K May 6 14:30 share
-rwxr-xr-x 1 jenkins jenkins 32M May 6 14:30 stores-dump
-rwxr-xr-x 1 jenkins jenkins 192M May 6 14:30 sync_diff_inspector
-rwxr-xr-x 1 jenkins jenkins 208M May 6 14:30 tidb-server
-rwxr-xr-x 1 jenkins jenkins 380M May 6 14:30 tiflash
-rwxr-xr-x 1 jenkins jenkins 418M May 6 14:30 tikv-server
-rwxr-xr-x 1 jenkins jenkins 2.0M May 6 14:30 xprog
[Pipeline] }
Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc)
3730180608 bytes in 71.58 secs (52111982 bytes/sec)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Tests)
[Pipeline] parallel
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G18')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G19')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G20')
[Pipeline] { (Branch:
Matrix - TEST_GROUP = 'G21') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G00') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G01') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G02') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G03') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G04') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G05') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G06') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G07') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G08') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G09') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G10') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G11') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G12') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G13') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G14') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G15') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G16') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G17') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G18') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G19') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G20') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G21') [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl 
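Note on the build step logged above: it follows a fixed pattern of enabling failpoints across the non-generated packages, compiling the race- and coverage-instrumented cdc.test plus the plain cdc binary, and disabling failpoints again even on failure via the `|| { ...; exit 1; }` guard. A standalone sketch of that pattern, with the package filter and the key build flags copied from the log (the helper function name and the shortened ldflags are illustrative, not the repository's actual Makefile target):

#!/usr/bin/env bash
# Sketch of the failpoint-wrapped build seen above; illustrative only.
set -euo pipefail

list_pkgs() {
  # Same exclusion list as the logged command; strips the module prefix.
  go list ./... \
    | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin' \
    | sed 's|^github.com/pingcap/tiflow/||' \
    | grep -v 'github.com/pingcap/tiflow'
}

# Rewrite failpoint markers into real code before compiling.
list_pkgs | xargs tools/bin/failpoint-ctl enable

# Coverage-instrumented test binary used by the integration tests.
CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest \
  -c -cover -covermode=atomic -coverpkg=github.com/pingcap/tiflow/... \
  -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \
  || { list_pkgs | xargs tools/bin/failpoint-ctl disable; exit 1; }

# Plain cdc binary.
CGO_ENABLED=0 GO111MODULE=on go build -trimpath -o bin/cdc ./cmd/cdc/main.go \
  || { list_pkgs | xargs tools/bin/failpoint-ctl disable; exit 1; }

# Always restore the working tree afterwards.
list_pkgs | xargs tools/bin/failpoint-ctl disable

The "Failpoint Build: true" line in the ./bin/cdc version output above is the visible confirmation that the binaries came out of this instrumented build.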
[Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk Agent pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-fd5hx-dj1d5 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "4a847aa3855f7e007b483b9ec5fb44f551571724" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-fd5hx" name: "pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl No credentials specified Warning: JENKINS-30600: special launcher 
org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5546ccd9; decorates RemoteLauncher[hudson.remoting.Channel@11c5fc63:JNLP4-connect connection from 10.233.71.25/10.233.71.25:42334] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Agent pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-m9d7x-7tg7c --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "362b454fc822d8ef6fa6fe9e67da602d6b1c154b" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-m9d7x" name: "pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes 
jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx [Pipeline] node Agent pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-tgl1n-mzzk8 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "f0d977d849b132622c5b0831a2127d5eb102f3ca" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-tgl1n" name: "pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5d3bb5bb; decorates RemoteLauncher[hudson.remoting.Channel@7e208cbc:JNLP4-connect connection from 10.233.72.223/10.233.72.223:49924] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Running on pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git 
https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w [Pipeline] { Agent pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-z94hj-3d01k --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "4e6c5d77f18f2fbf79e535bbdd6b861249fbdb2b" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-z94hj" name: "pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test [Pipeline] podTemplate [Pipeline] { [Pipeline] node [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] node [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern 
matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg [Pipeline] cache No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@532dd900; decorates RemoteLauncher[hudson.remoting.Channel@6addf55c:JNLP4-connect connection from 10.233.69.45/10.233.69.45:48158] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@560b9700; decorates RemoteLauncher[hudson.remoting.Channel@1cb62ab7:JNLP4-connect connection from 10.233.67.220/10.233.67.220:46014] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-c4xqq-1lrjw --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: 
"3bbcb6aef13838846247f7f353725db827ff09fd" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-c4xqq" name: "pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-w1z4r-n67n2 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "2a38bb0f3affdd92dcd6016eabd4acf3b571033a" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-w1z4r" name: "pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: 
"hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Running on pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Agent pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-2fwdx-4vz8x --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "6d5eb46685f3e1b580321bf2b8b4eac5cd74fdda" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-2fwdx" name: "pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk" - name: "JENKINS_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Running on pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-930xb-c01q7 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "22ceb294064849eee300ced7ceaca8f5cc52497c" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-930xb" name: "pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" 
Running on pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-rqn19-d4crb --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "e95cea3d10e309ebd00ff2b2b6849ec0065de608" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-rqn19" name: "pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-fw71j-f959l --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "6906757479bb84e0046c6bd1e38c53679230e566" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-fw71j" name: "pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9" 
namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_test.yaml from git https://github.com/PingCAP-QE/ci.git Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 29.75 secs (125364236 bytes/sec) [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] sh [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G00 Run cases: bdr_mode 
capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=264653a8-d8fa-4099-bc3d-5cc47bb2428a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-fd5hx GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-fd5hx pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... [Pipeline] node [Pipeline] node [Pipeline] { [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] stage The recommended git tool is: git [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc Created Pod: kubernetes 
jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv [Pipeline] withCredentials No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6b46924f; decorates RemoteLauncher[hudson.remoting.Channel@1a24c568:JNLP4-connect connection from 10.233.105.166/10.233.105.166:49558] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4b133f2e; decorates RemoteLauncher[hudson.remoting.Channel@62ac0970:JNLP4-connect connection from 10.233.123.84/10.233.123.84:58442] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] { [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5a40370c; decorates RemoteLauncher[hudson.remoting.Channel@3f58264c:JNLP4-connect connection from 10.233.66.134/10.233.66.134:49500] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@28f03ad7; decorates RemoteLauncher[hudson.remoting.Channel@42490c33:JNLP4-connect connection from 10.233.67.111/10.233.67.111:37232] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { 
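Once its pod and workspace cache are ready, each worker invokes ./tests/integration_tests/run_group.sh <sink-type> <group>, as the G00 worker did above with `run_group.sh mysql G00` and its "Run cases: ..." list. A hypothetical sketch of how such a dispatcher can map a group ID to its case list (the G00 case list is copied from the log; the mapping table and loop are assumptions, not the repository's actual script):

#!/usr/bin/env bash
# Hypothetical group dispatcher; structure assumed, case list taken from the log.
set -euo pipefail

sink_type=$1   # e.g. mysql
group=$2       # e.g. G00

declare -A groups
groups[G00]="bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide
  server_config_compatibility kafka_big_messages kafka_compression kafka_messages
  kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector
  kafka_column_selector_avro debezium lossy_ddl storage_csv_update"

for tc in ${groups[$group]}; do
  echo "=================>> Running test $tc using Sink-Type: $sink_type... <<================="
  bash "tests/integration_tests/$tc/run.sh" "$sink_type"
done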
No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@e1acaed; decorates RemoteLauncher[hudson.remoting.Channel@5ef90f7b:JNLP4-connect connection from 10.233.67.150/10.233.67.150:50958] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@23965c9d; decorates RemoteLauncher[hudson.remoting.Channel@2c1c559f:JNLP4-connect connection from 10.233.67.125/10.233.67.125:46512] will be ignored (a typical symptom is the Git executable not being run inside a designated container) [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] cache Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" start tidb cluster in /tmp/tidb_cdc_test/bdr_mode Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
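The bdr_mode case then brings up separate upstream and downstream clusters under /tmp/tidb_cdc_test/bdr_mode, starting with PD (v8.2.0-alpha-14-g1679dbca2 in this run) and waiting for it to serve before the rest of the stack starts. The actual wait logic lives in the repository's test utility scripts (tests/integration_tests/_utils is on PATH in the environment dump above) and may differ; a minimal readiness poll in the same spirit, assuming PD's default client URL, looks like:

# Minimal sketch of a PD readiness check; illustrative, not the repo's helper.
PD_ADDR=${PD_ADDR:-http://127.0.0.1:2379}

for i in $(seq 1 60); do
  # PD serves /pd/api/v1/version once its client endpoint is ready.
  if curl -sf "$PD_ADDR/pd/api/v1/version" >/dev/null; then
    echo "upstream PD is up at $PD_ADDR"
    exit 0
  fi
  sleep 1
done
echo "PD did not become ready in time" >&2
exit 1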
Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-n74j6-crm2g --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "cfe02f994afef802bbfe74ec3a4163c7efeb2137" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-n74j6" name: "pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - 
image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-6fqqh-xqk93 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "10afa5068b15a7b8b494d8cb9526b4e77249cad5" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-6fqqh" name: "pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3" - name: "JENKINS_AGENT_WORKDIR" value: 
"/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-1tgp6-49vl6 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0e29c581e8ad5482cf5880fb6ebbef23559f3d06" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-1tgp6" name: "pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-s7cvf-4wl4m --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: 
"49151956e52bedd5811597387bd14e1531048c49" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-s7cvf" name: "pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Agent pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-rnlk7-82rzg --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "07e8b6e9320ffb2349b1e4bbe91045f53c48223f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-rnlk7" name: "pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-krg3n-w16k0 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "e5413ebcac3b6ae845def2035d6762b3db590795" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-krg3n" name: "pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f" namespace: 
"jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Agent pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-nrwqc-kcfgd --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "cde140c66f24df8744b34f3f6096299f8c97f18c" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-nrwqc" name: "pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: 
"JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Agent pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-3nzd5-phv98 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "d9976d04ebba8c3556113bab67bec4f6ac86bba7" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-3nzd5" name: "pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Running on pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Agent pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-56k4h-xtn1f --- apiVersion: "v1" 
kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "c130ba586c3028c935e4ff414b6936dcbbb8ee41" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-56k4h" name: "pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Agent pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-8z2x2-884kr --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "3f4d5371f969d4623c6725e9d8dfb471dcbc84b1" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-8z2x2" name: "pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: 
"hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test Running on pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Still waiting to schedule task โ€˜pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33โ€™ is offline Still waiting to schedule task Waiting for next available executor on โ€˜pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pmโ€™ Agent pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-n5dsj-0rnw1 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "fdd59bc6907b3e911ba13a248007b3d19bf55354" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-n5dsj" name: "pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm" - name: "JENKINS_AGENT_WORKDIR" value: 
"/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d326c30ac0018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv, pid:1231, start at 2024-05-06 14:33:00.758942624 +0800 CST m=+5.206308372 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:35:00.765 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:33:00.765 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:23:00.765 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d326c30ac0018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv, pid:1231, start at 2024-05-06 14:33:00.758942624 +0800 CST m=+5.206308372 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:35:00.765 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:33:00.765 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:23:00.765 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. 
Do not delete. tikv_gc_leader_uuid 63d326c33ac0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fd5hx-dj1d5-c42gv, pid:1306, start at 2024-05-06 14:33:00.944171569 +0800 CST m=+5.343969771 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:35:00.953 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:33:00.957 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:23:00.957 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
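The mysql.tidb rows printed above are how the harness confirms that each TiDB server has bootstrapped and that TiKV GC is configured before the test proceeds. A minimal sketch of running the same check by hand, assuming the upstream TiDB of this case listens on 127.0.0.1:4000 with the default root user and an empty password (the port is not shown in this part of the log):

    # Print the TiKV GC bookkeeping rows, matching the VARIABLE_NAME / VARIABLE_VALUE / COMMENT output above.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT * FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc_%' OR VARIABLE_NAME = 'bootstrapped';"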
Logging trace to /tmp/tidb_cdc_test/bdr_mode/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/bdr_mode/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/bdr_mode/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/bdr_mode/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/bdr_mode/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Agent pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 is provisioned from template pingcap_tiflow_pull_cdc_integration_test_1781-t2117-klrn9 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "643cd8dc8b4d6efcafea9c37be552e9f5684146f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_test_1781-t2117" name: "pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "hub.pingcap.net/jenkins/centos7_golang-1.21:latest" name: "golang" resources: limits: memory: "16Gi" cpu: "6" requests: memory: "12Gi" cpu: "4" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: limits: memory: "256Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: 
medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test [Mon May 6 14:33:05 CST 2024] <<<<<< START cdc server in bdr_mode case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bdr_mode.26152617.out server --log-file /tmp/tidb_cdc_test/bdr_mode/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/bdr_mode/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:33:08 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fc3b987d-499e-4c22-9cbe-644dc3ae3e2b {"id":"fc3b987d-499e-4c22-9cbe-644dc3ae3e2b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977186} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ae227cb fc3b987d-499e-4c22-9cbe-644dc3ae3e2b /tidb/cdc/default/default/upstream/7365770869186304235 {"id":7365770869186304235,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fc3b987d-499e-4c22-9cbe-644dc3ae3e2b {"id":"fc3b987d-499e-4c22-9cbe-644dc3ae3e2b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977186} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ae227cb fc3b987d-499e-4c22-9cbe-644dc3ae3e2b /tidb/cdc/default/default/upstream/7365770869186304235 {"id":7365770869186304235,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fc3b987d-499e-4c22-9cbe-644dc3ae3e2b 
{"id":"fc3b987d-499e-4c22-9cbe-644dc3ae3e2b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977186} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ae227cb fc3b987d-499e-4c22-9cbe-644dc3ae3e2b /tidb/cdc/default/default/upstream/7365770869186304235 {"id":7365770869186304235,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Mon May 6 14:33:08 CST 2024] <<<<<< START cdc server in bdr_mode case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8400/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bdr_mode.26732675.out server --log-file /tmp/tidb_cdc_test/bdr_mode/cdcdown.log --log-level debug --data-dir /tmp/tidb_cdc_test/bdr_mode/cdc_datadown --cluster-id default --addr 127.0.0.1:8400 --pd http://127.0.0.1:2479 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8400/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8400 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8400; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8400/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8400 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8400 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8400 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:33:12 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf2fd371-a3ea-433d-a989-c5035df1bf7e {"id":"bf2fd371-a3ea-433d-a989-c5035df1bf7e","address":"127.0.0.1:8400","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/35118f4c9ae2518c bf2fd371-a3ea-433d-a989-c5035df1bf7e /tidb/cdc/default/default/upstream/7365770861062035157 {"id":7365770861062035157,"pd-endpoints":"http://127.0.0.1:2479,http://127.0.0.1:2479","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf2fd371-a3ea-433d-a989-c5035df1bf7e {"id":"bf2fd371-a3ea-433d-a989-c5035df1bf7e","address":"127.0.0.1:8400","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/35118f4c9ae2518c bf2fd371-a3ea-433d-a989-c5035df1bf7e /tidb/cdc/default/default/upstream/7365770861062035157 {"id":7365770861062035157,"pd-endpoints":"http://127.0.0.1:2479,http://127.0.0.1:2479","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf2fd371-a3ea-433d-a989-c5035df1bf7e {"id":"bf2fd371-a3ea-433d-a989-c5035df1bf7e","address":"127.0.0.1:8400","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/35118f4c9ae2518c bf2fd371-a3ea-433d-a989-c5035df1bf7e /tidb/cdc/default/default/upstream/7365770861062035157 {"id":7365770861062035157,"pd-endpoints":"http://127.0.0.1:2479,http://127.0.0.1:2479","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bdr_mode.cli.2724.out cli changefeed create --sink-uri=mysql://root@127.0.0.1:3306 -c test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/bdr_mode/conf/up.toml Create changefeed successfully! 
ID: test-1 Info: {"upstream_id":7365770869186304235,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306","create_time":"2024-05-06T14:33:12.561532988+08:00","start_ts":449570981128437764,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":true,"enable_table_monitor":false,"bdr_mode":true,"sync_point_interval":30000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449570981128437764,"checkpoint_ts":449570981128437764,"checkpoint_time":"2024-05-06 14:33:12.415"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bdr_mode.cli.2761.out cli changefeed create --sink-uri=mysql://root@127.0.0.1:4000 -c test-2 --server http://127.0.0.1:8400 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/bdr_mode/conf/down.toml Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 23.25 secs (160424059 bytes/sec) [Pipeline] { [Pipeline] cache Create changefeed successfully! 
ID: test-2 Info: {"upstream_id":7365770861062035157,"namespace":"default","id":"test-2","sink_uri":"mysql://root@127.0.0.1:4000","create_time":"2024-05-06T14:33:14.478202091+08:00","start_ts":449570981637783555,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":true,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449570981637783555,"checkpoint_ts":449570981637783555,"checkpoint_time":"2024-05-06 14:33:14.358"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
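With both changefeeds created, their state can be inspected through the same CLI that created them. A minimal sketch, assuming the two TiCDC servers started above are still listening on 127.0.0.1:8300 (upstream cluster) and 127.0.0.1:8400 (downstream cluster):

    # List all changefeeds on each server, then query one by its ID.
    cdc cli changefeed list --server http://127.0.0.1:8300
    cdc cli changefeed query --server http://127.0.0.1:8400 -c test-2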
+ set +x Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 17.40 secs (214431883 bytes/sec) [Pipeline] { [Pipeline] cache Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 24.81 secs (150361784 bytes/sec) [Pipeline] { table tidb_cdc.syncpoint_v1 exists table tidb_cdc.syncpoint_v1 does not exists table bdr_mode.finish_mark exists check diff failed 1-th time, retry later [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G02 Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=b9ff9175-3298-4029-b24e-f6cd675e683b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-m9d7x GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-m9d7x pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
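Each agent prepares a clean state directory and then runs one group of the integration tests. A minimal sketch of reproducing a single group from a tiflow checkout, assuming the required binaries are already in place under the bin directories that run_group.sh expects (as the CI cache restore provides above):

    # Reset the per-run state directory, then run sink type "mysql", group G02.
    rm -rf /tmp/tidb_cdc_test
    mkdir -p /tmp/tidb_cdc_test
    chmod +x ./tests/integration_tests/run_group.sh
    ./tests/integration_tests/run_group.sh mysql G02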
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=80daec70-62c6-4d2a-b107-231df4bb7f8a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-tgl1n GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-tgl1n pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G01 Run cases: http_api http_api_tls api_v2 http_api_tls_with_user_auth cli_tls_with_auth kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check canal_json_storage_basic canal_json_storage_partition_table multi_tables_ddl PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=60b6c0f2-461a-4703-8c20-d58e6b2809de BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-z94hj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s pingcap_tiflow_pull_cdc_integration_test_1781-z94hj GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/http_api/run.sh using Sink-Type: mysql... <<================= WARNING: Running pip install with root privileges is generally not a good idea. Try `__main__.py install --user` instead. 
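The shards above each launch ./tests/integration_tests/run_group.sh <sink-type> <group>, and the group name (G01, G02, G03, ...) expands into the case list echoed after "Run cases:". A minimal sketch of such a dispatcher, with the group map hard-coded from the G03 list shown above (the real tiflow script may differ):

#!/usr/bin/env bash
# Illustrative dispatcher, not the actual run_group.sh.
set -eu
sink_type=$1   # e.g. mysql
group=$2       # e.g. G03
case "$group" in
    # Case list copied from the "Run cases:" line for G03 above.
    G03) cases="row_format drop_many_tables processor_stop_delay partition_table" ;;
    *)   echo "unknown group: $group" >&2; exit 1 ;;
esac
echo "Run cases: $cases"
for c in $cases; do
    bash "tests/integration_tests/$c/run.sh" "$sink_type"
done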
Collecting requests==2.26.0 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { check diff failed 2-th time, retry later [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN Downloading https://files.pythonhosted.org/packages/92/96/144f70b972a9c0eabbd4391ef93ccd49d0f2747f4f6a2a2738e99e5adc65/requests-2.26.0-py2.py3-none-any.whl (62kB) [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@27c3c363; decorates RemoteLauncher[hudson.remoting.Channel@24fcd65b:JNLP4-connect connection from 10.233.107.250/10.233.107.250:40830] will be ignored (a typical symptom is the Git executable not being run inside a designated container) [Pipeline] { Cloning the remote Git repository Using shallow clone 
with depth 1 [Pipeline] dir No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@66b666a0; decorates RemoteLauncher[hudson.remoting.Channel@73d5db32:JNLP4-connect connection from 10.233.105.125/10.233.105.125:60526] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { Collecting idna<4,>=2.5; python_version >= "3" (from requests==2.26.0) [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] cache No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6c10c7e6; decorates RemoteLauncher[hudson.remoting.Channel@1f5d9ae8:JNLP4-connect connection from 10.233.71.165/10.233.71.165:49602] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4b43c57e; decorates RemoteLauncher[hudson.remoting.Channel@65b8ba5e:JNLP4-connect connection from 10.233.86.97/10.233.86.97:59056] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@61692d75; decorates RemoteLauncher[hudson.remoting.Channel@2cf1ccbd:JNLP4-connect connection from 10.233.123.116/10.233.123.116:43934] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@77c7fd2f; decorates RemoteLauncher[hudson.remoting.Channel@629eb7a0:JNLP4-connect connection from 10.233.97.237/10.233.97.237:44804] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5b96c48a; decorates RemoteLauncher[hudson.remoting.Channel@5e9a386:JNLP4-connect connection from 10.233.93.56/10.233.93.56:50382] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified 
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@50bcfa1c; decorates RemoteLauncher[hudson.remoting.Channel@9fc122f:JNLP4-connect connection from 10.233.69.107/10.233.69.107:50970] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@443f6f14; decorates RemoteLauncher[hudson.remoting.Channel@61e469b9:JNLP4-connect connection from 10.233.67.254/10.233.67.254:59538] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Downloading https://files.pythonhosted.org/packages/e5/3e/741d8c82801c347547f8a2a06aa57dbb1992be9e948df2ea0eda2c8b79e8/idna-3.7-py3-none-any.whl (66kB) Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1786e7b5; decorates RemoteLauncher[hudson.remoting.Channel@7b169e89:JNLP4-connect connection from 10.233.72.184/10.233.72.184:50110] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* 
# timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@53342304; decorates RemoteLauncher[hudson.remoting.Channel@6b5672c7:JNLP4-connect connection from 10.233.66.54/10.233.66.54:37752] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3d897b4d; decorates RemoteLauncher[hudson.remoting.Channel@5373fad5:JNLP4-connect connection from 10.233.66.104/10.233.66.104:52164] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning the remote Git repository Using shallow clone with depth 1 check diff failed 3-th time, retry later Cloning repository https://github.com/PingCAP-QE/ci.git start tidb cluster in /tmp/tidb_cdc_test/consistent_replicate_ddl Starting Upstream PD... Requirement already up-to-date: charset-normalizer~=2.0.0; python_version >= "3" in /usr/local/lib/python3.6/site-packages (from requests==2.26.0) Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
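"Verifying upstream PD is started..." is a readiness poll against the PD client endpoint; only the address used elsewhere in this log (http://127.0.0.1:2379) is visible here, so the sketch below is generic and the HTTP path is an assumption:

# Illustrative readiness poll; /pd/api/v1/version is an assumed probe path,
# only the address 127.0.0.1:2379 appears in this log.
for i in $(seq 1 60); do
    if curl -sf http://127.0.0.1:2379/pd/api/v1/version >/dev/null; then
        echo "upstream PD is started"
        break
    fi
    sleep 1
done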
Collecting certifi>=2017.4.17 (from requests==2.26.0) Downloading https://files.pythonhosted.org/packages/ba/06/a07f096c664aeb9f01624f858c3add0a4e913d6c96257acb4fce61e7de14/certifi-2024.2.2-py3-none-any.whl (163kB) Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Collecting urllib3<1.27,>=1.21.1 (from requests==2.26.0) Downloading https://files.pythonhosted.org/packages/b0/53/aa91e163dcfd1e5b82d8a890ecf13314e3e149c05270cc644581f77f17fd/urllib3-1.26.18-py2.py3-none-any.whl (143kB) Installing collected packages: idna, certifi, urllib3, requests Found existing installation: idna 3.4 Uninstalling idna-3.4: Successfully uninstalled idna-3.4 Found existing installation: certifi 2023.5.7 Uninstalling certifi-2023.5.7: Successfully uninstalled certifi-2023.5.7 Found existing installation: urllib3 1.26.16 Uninstalling urllib3-1.26.16: Successfully uninstalled urllib3-1.26.16 Found existing installation: requests 2.27.1 Uninstalling requests-2.27.1: Successfully uninstalled requests-2.27.1 Successfully installed certifi-2024.2.2 idna-3.7 requests-2.26.0 urllib3-1.26.18 start tidb cluster in /tmp/tidb_cdc_test/row_format Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) The 1 times to try to start tidb cluster... 
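The pip transcript above pins the Python HTTP client used by the http_api case; "Successfully installed ... requests-2.26.0 ..." is its tail. The exact command is not captured in this log, but an equivalent invocation would be:

# Equivalent of the pip output above; the flags actually used by the case are not shown.
pip3 install requests==2.26.0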
Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" check diff failed 4-th time, retry later Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid 
second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Verifying downstream PD is started... Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 check diff failed 5-th time, retry later > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/http_api Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
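The repeated "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines above are expected while "Verifying Upstream TiDB is started..." polls a server that is still starting; they stop once TiDB answers and the mysql.tidb bootstrap/GC variables are dumped (the VARIABLE_NAME blocks that follow). A sketch of such a poll, assuming TiDB's default port 4000 (the port is not printed in the error text):

# Illustrative poll: retry until the upstream TiDB answers a trivial query.
# Port 4000 is TiDB's default and an assumption here.
while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
    sleep 1
done
# Once connected, dump the bootstrap/GC metadata seen in the blocks below.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'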
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff failed 6-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Avoid second fetch > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 check diff failed 7-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 8.35 secs (446692443 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 8-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270d4680013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl, pid:1229, start at 2024-05-06 14:34:16.752729582 +0800 CST m=+5.239196422 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:16.761 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:16.730 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:16.730 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 9-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270d4680013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl, pid:1229, start at 2024-05-06 14:34:16.752729582 +0800 CST m=+5.239196422 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:16.761 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:16.730 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:16.730 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. 
ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270d5340015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-m9d7x-7tg7c-hs4dl, pid:1316, start at 2024-05-06 14:34:16.81978647 +0800 CST m=+5.240804636 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:16.826 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:16.831 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:16.831 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/consistent_replicate_ddl/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/consistent_replicate_ddl/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/consistent_replicate_ddl/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/consistent_replicate_ddl/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/consistent_replicate_ddl/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 10-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270f2f80004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz, pid:1246, start at 2024-05-06 14:34:18.689389825 +0800 CST m=+5.228771769 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:18.696 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:18.686 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:18.686 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270f2f80004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz, pid:1246, start at 2024-05-06 14:34:18.689389825 +0800 CST m=+5.228771769 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:18.696 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:18.686 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:18.686 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3270f3a80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-tgl1n-mzzk8-vvchz, pid:1329, start at 2024-05-06 14:34:18.766500449 +0800 CST m=+5.254251763 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:18.773 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240506-14:34:18.730 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:18.730 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Mon May 6 14:34:21 CST 2024] <<<<<< START cdc server in consistent_replicate_ddl case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.consistent_replicate_ddl.26302632.out server --log-file /tmp/tidb_cdc_test/consistent_replicate_ddl/cdcconsistent_replicate_ddl.server1.log --log-level debug --data-dir /tmp/tidb_cdc_test/consistent_replicate_ddl/cdc_dataconsistent_replicate_ddl.server1 --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 11-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2636.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:34:24 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04733417-2241-449c-b6e6-daad22fa71d6 {"id":"04733417-2241-449c-b6e6-daad22fa71d6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977262} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c0ecccb 04733417-2241-449c-b6e6-daad22fa71d6 /tidb/cdc/default/default/upstream/7365771193924766665 {"id":7365771193924766665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04733417-2241-449c-b6e6-daad22fa71d6 {"id":"04733417-2241-449c-b6e6-daad22fa71d6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977262} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c0ecccb 04733417-2241-449c-b6e6-daad22fa71d6 /tidb/cdc/default/default/upstream/7365771193924766665 {"id":7365771193924766665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04733417-2241-449c-b6e6-daad22fa71d6 {"id":"04733417-2241-449c-b6e6-daad22fa71d6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977262} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c0ecccb 04733417-2241-449c-b6e6-daad22fa71d6 /tidb/cdc/default/default/upstream/7365771193924766665 
{"id":7365771193924766665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check diff failed 12-th time, retry later ***************** properties ***************** "dotransactions"="false" "mysql.user"="root" "mysql.db"="consistent_replicate_ddl" "threadcount"="10" "fieldcount"="3" "recordcount"="50" "scanproportion"="0" "updateproportion"="0" "requestdistribution"="uniform" "mysql.port"="4000" "readproportion"="0" "operationcount"="0" "workload"="core" "readallfields"="true" "insertproportion"="0" "mysql.host"="127.0.0.1" ********************************************** Run finished, takes 10.302953ms INSERT - Takes(s): 0.0, Count: 50, OPS: 9550.1, Avg(us): 1766, Min(us): 894, Max(us): 4782, 95th(us): 5000, 99th(us): 5000 + set +x + tso='449570999916560386 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449570999916560386 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Mon May 6 14:34:25 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.26742676.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table consistent_replicate_ddl.usertable1 not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327141f80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s, pid:1273, start at 2024-05-06 14:34:23.791620102 +0800 CST m=+5.519291599 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:23.798 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:23.792 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:23.792 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327141f80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s, pid:1273, start at 2024-05-06 14:34:23.791620102 +0800 CST m=+5.519291599 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:23.798 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:23.792 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:23.792 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3271435c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-z94hj-3d01k-hln7s, pid:1350, start at 2024-05-06 14:34:23.864646507 +0800 CST m=+5.533944179 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:36:23.871 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:34:23.831 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:24:23.831 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
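The curl traces above ("START cdc server in ... case", repeated "Connection refused", then an HTTP 200 whose body contains "etcd info") are the harness waiting for each cdc server to come up on 127.0.0.1:8300. Condensed from those set -x traces, the probe amounts to:

# Condensed from the traces above: poll the cdc debug endpoint with the
# basic-auth credentials shown in the log until the body mentions "etcd info".
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret || true)
    if echo "$res" | grep -q 'etcd info'; then
        break
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready" >&2
        exit 1
    fi
    sleep 3
done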
Logging trace to /tmp/tidb_cdc_test/http_api/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/http_api/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/http_api/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/http_api/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/http_api/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 13-th time, retry later table consistent_replicate_ddl.usertable1 not exists for 2-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:34:28 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cc066fa8-b53b-40ce-8813-ef229be2d2f0 {"id":"cc066fa8-b53b-40ce-8813-ef229be2d2f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977265} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c1063d7 cc066fa8-b53b-40ce-8813-ef229be2d2f0 /tidb/cdc/default/default/upstream/7365771197744714094 {"id":7365771197744714094,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cc066fa8-b53b-40ce-8813-ef229be2d2f0 {"id":"cc066fa8-b53b-40ce-8813-ef229be2d2f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977265} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c1063d7 cc066fa8-b53b-40ce-8813-ef229be2d2f0 /tidb/cdc/default/default/upstream/7365771197744714094 
{"id":7365771197744714094,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cc066fa8-b53b-40ce-8813-ef229be2d2f0 {"id":"cc066fa8-b53b-40ce-8813-ef229be2d2f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977265} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c1063d7 cc066fa8-b53b-40ce-8813-ef229be2d2f0 /tidb/cdc/default/default/upstream/7365771197744714094 {"id":7365771197744714094,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2733.out cli changefeed create --start-ts=449570999916560386 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ [Mon May 6 14:34:28 CST 2024] <<<<<< START cdc server in http_api case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.http_api.27422744.out server --log-file /tmp/tidb_cdc_test/http_api/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/http_api/cdc_data1 --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! 
ID: 9361c37d-b6a0-4870-83c1-94d2ff804338 Info: {"upstream_id":7365771197744714094,"namespace":"default","id":"9361c37d-b6a0-4870-83c1-94d2ff804338","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:34:29.124053354+08:00","start_ts":449570999916560386,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449570999916560386,"checkpoint_ts":449570999916560386,"checkpoint_time":"2024-05-06 14:34:24.086"} PASS check diff failed 14-th time, retry later coverage: 2.4% of statements in github.com/pingcap/tiflow/... table consistent_replicate_ddl.usertable1 not exists for 3-th check, retry later + set +x + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:34:31 GMT < Content-Length: 853 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc.test cli capture list 2>&1 | grep '"is-owner": true' check diff failed 15-th time, retry later table consistent_replicate_ddl.usertable1 exists table consistent_replicate_ddl.usertable2 not exists for 1-th check, retry later "is-owner": true, run task successfully check diff failed 16-th time, retry later table consistent_replicate_ddl.usertable2 not exists for 2-th check, retry later owner pid: 2747 owner id bd2b0e56-8e36-4284-8fdf-17918fd5d132 default [Mon May 6 14:34:34 CST 2024] <<<<<< START cdc server in 
http_api case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.http_api.28722874.out server --log-file /tmp/tidb_cdc_test/http_api/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/http_api/cdc_data2 --cluster-id default --addr 127.0.0.1:8301 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 17-th time, retry later table consistent_replicate_ddl.usertable2 exists table consistent_replicate_ddl.usertable3 not exists for 1-th check, retry later check diff failed 18-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:34:37 GMT < Content-Length: 1303 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/capture/c9a1d78a-9a69-40d5-a1f8-02ad005b5927 {"id":"c9a1d78a-9a69-40d5-a1f8-02ad005b5927","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977274} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed68 c9a1d78a-9a69-40d5-a1f8-02ad005b5927 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/capture/c9a1d78a-9a69-40d5-a1f8-02ad005b5927 
{"id":"c9a1d78a-9a69-40d5-a1f8-02ad005b5927","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977274} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed68 c9a1d78a-9a69-40d5-a1f8-02ad005b5927 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bd2b0e56-8e36-4284-8fdf-17918fd5d132 {"id":"bd2b0e56-8e36-4284-8fdf-17918fd5d132","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977268} /tidb/cdc/default/__cdc_meta__/capture/c9a1d78a-9a69-40d5-a1f8-02ad005b5927 {"id":"c9a1d78a-9a69-40d5-a1f8-02ad005b5927","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977274} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed39 bd2b0e56-8e36-4284-8fdf-17918fd5d132 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9c2aed68 c9a1d78a-9a69-40d5-a1f8-02ad005b5927 /tidb/cdc/default/default/upstream/7365771227242333478 {"id":7365771227242333478,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc.test cli capture list 2>&1 | grep -v "bd2b0e56-8e36-4284-8fdf-17918fd5d132 default" | grep id table consistent_replicate_ddl.usertable3 not exists for 2-th check, retry later "id": "c9a1d78a-9a69-40d5-a1f8-02ad005b5927", run task successfully check diff failed 19-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 3-th check, retry later capture_id: c9a1d78a-9a69-40d5-a1f8-02ad005b5927 pass test: check health pass test: get status pass test: create changefeed check diff failed 20-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 4-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 23.29 secs (160129280 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 21-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 5-th check, retry later table row_format.finish_mark not exists for 1-th check, retry later table test.simple1 not exists for 1-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 6-th check, retry later check diff failed 22-th time, retry later table row_format.finish_mark not exists for 2-th check, retry later table test.simple1 exists table consistent_replicate_ddl.usertable3 not exists for 7-th check, retry later check diff failed 23-th time, retry later pass test: 
list changefeed pass test: get changefeed pass test: pause changefeed pass test: update changefeed check diff failed 24-th time, retry later table row_format.finish_mark not exists for 3-th check, retry later wait process 2747 exit for 1-th time... table consistent_replicate_ddl.usertable3 not exists for 8-th check, retry later wait process 2747 exit for 2-th time... wait process 2747 exit for 3-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (2747) - No such process wait process 2747 exit for 4-th time... process 2747 already exit cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' check diff failed 25-th time, retry later table row_format.finish_mark not exists for 4-th check, retry later run task failed 1-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 9-th check, retry later table row_format.finish_mark not exists for 5-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 10-th check, retry later check diff failed 26-th time, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 2-th time, retry later table row_format.finish_mark not exists for 6-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 11-th check, retry later check diff failed 27-th time, retry later table row_format.finish_mark not exists for 7-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 12-th check, retry later check diff failed 28-th time, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 3-th time, retry later table row_format.finish_mark not exists for 8-th check, retry later check diff failed 29-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 13-th check, retry later table row_format.finish_mark not exists for 9-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 17.08 secs (218368707 bytes/sec) [Pipeline] { [Pipeline] cache table consistent_replicate_ddl.usertable3 not exists for 14-th check, retry later check diff failed 30-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 15-th check, retry later table row_format.finish_mark not exists for 10-th check, retry later check diff failed at last There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in 
/tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log cat: /tmp/tidb_cdc_test/bdr_mode/sync_diff/output/sync_diff.log: No such file or directory cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 4-th time, retry later table row_format.finish_mark not exists for 11-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 16-th check, retry later table row_format.finish_mark not exists for 12-th check, retry later table 
consistent_replicate_ddl.usertable3 not exists for 17-th check, retry later table row_format.finish_mark not exists for 13-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 18-th check, retry later table row_format.finish_mark not exists for 14-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 19-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 5-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 20-th check, retry later table row_format.finish_mark not exists for 15-th check, retry later table row_format.finish_mark not exists for 16-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 21-th check, retry later table row_format.finish_mark not exists for 17-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 22-th check, retry later table row_format.finish_mark not exists for 18-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 23-th check, retry later table row_format.finish_mark not exists for 19-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 24-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' table row_format.finish_mark not exists for 20-th check, retry later run task failed 6-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 25-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 19.40 secs (192259957 bytes/sec) [Pipeline] { [Pipeline] cache table consistent_replicate_ddl.usertable3 not exists for 26-th check, retry later table row_format.finish_mark not exists for 21-th check, retry later table row_format.finish_mark not exists for 22-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 27-th check, retry later table row_format.finish_mark not exists for 23-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 28-th check, retry later table row_format.finish_mark not exists for 24-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 29-th check, retry later table row_format.finish_mark not exists for 25-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 30-th check, retry later table row_format.finish_mark not exists for 26-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 31-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 7-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 32-th check, retry later table row_format.finish_mark not exists for 27-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 11.45 secs (325672689 bytes/sec) [Pipeline] { [Pipeline] cache table row_format.finish_mark not exists for 28-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 33-th check, retry later table row_format.finish_mark not exists for 29-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 34-th check, retry later table 
row_format.finish_mark not exists for 30-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 35-th check, retry later table row_format.finish_mark not exists for 31-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 36-th check, retry later table row_format.finish_mark not exists for 32-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 37-th check, retry later table row_format.finish_mark not exists for 33-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 38-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 8-th time, retry later table row_format.finish_mark not exists for 34-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 39-th check, retry later table row_format.finish_mark not exists for 35-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 40-th check, retry later table row_format.finish_mark not exists for 36-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 41-th check, retry later table row_format.finish_mark not exists for 37-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 42-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 43-th check, retry later table row_format.finish_mark not exists for 38-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 19.87 secs (187706795 bytes/sec) [Pipeline] { [Pipeline] sh table row_format.finish_mark not exists for 39-th check, retry later [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G06 Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=d9ff3d60-6088-46a3-9204-8b77e0450f58 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-c4xqq GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-c4xqq pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
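The run_group.sh invocation above selects a case list for group G06 and then runs each case's run.sh with the chosen sink type. A rough sketch of that dispatch, assuming a simple group-to-cases mapping (only the G06 case list is taken from the log; the structure and error handling are assumptions, not the real script):

# Hypothetical group runner: map a group label to its cases and run each one.
sink_type=$1   # e.g. mysql
group=$2       # e.g. G06
case "$group" in
  G06) cases="sink_retry changefeed_error ddl_sequence resourcecontrol" ;;
  *) echo "unknown group: $group"; exit 1 ;;
esac
for tc in $cases; do
  script="tests/integration_tests/${tc}/run.sh"
  echo "=================>> Running test ${script} using Sink-Type: ${sink_type}... <<================="
  bash "$script" "$sink_type" || exit 1
done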
table consistent_replicate_ddl.usertable3 not exists for 44-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G07 Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=b65babb5-6116-4dc2-81b6-92975b7be2e1 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-w1z4r GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-w1z4r pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G04 Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=61764a9e-327c-469f-b5fc-707a5e1ffb96 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-2fwdx GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-2fwdx pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
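The recurring "table <db>.<table> not exists for N-th check, retry later" lines in this log come from a probe that repeatedly asks the downstream whether a marker table has been replicated yet. A hedged sketch of such a probe against a MySQL-protocol downstream (host, port, credentials and the retry cap are assumptions for illustration only):

# Sketch: poll the downstream's information_schema until the table shows up.
check_table_exists() {
  local full_table=$1            # e.g. consistent_replicate_ddl.usertable3
  local db=${full_table%%.*}
  local tbl=${full_table#*.}
  local i
  for ((i = 1; i <= 60; i++)); do
    if mysql -h 127.0.0.1 -P 3306 -u root -N -e \
      "SELECT 1 FROM information_schema.tables WHERE table_schema='${db}' AND table_name='${tbl}'" | grep -q 1; then
      echo "table ${full_table} exists"
      return 0
    fi
    echo "table ${full_table} not exists for ${i}-th check, retry later"
    sleep 2
  done
  return 1
}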
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=7a40deda-3238-43e0-81ee-a9aa0cf42b5e BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-930xb GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg pingcap_tiflow_pull_cdc_integration_test_1781-930xb GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... table row_format.finish_mark not exists for 40-th check, retry later [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G05 Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=c81ab01e-7d1e-4335-84b8-791b97d9a7db BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-rqn19 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-rqn19 pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
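The "check diff failed N-th time, retry later" and "run task failed N-th time, retry later" messages elsewhere in this log share one shape: run a check, report the attempt number, retry after a short sleep, and give up after a cap. A generic sketch of that wrapper (the name, cap and wording are illustrative, not the repository's helper):

# Sketch: retry a command until it succeeds or the attempt cap is reached,
# emitting "... failed N-th time, retry later" style progress messages.
retry_task() {
  local what=$1; shift
  local max=${RETRY_MAX:-30}
  local i
  for ((i = 1; i <= max; i++)); do
    if "$@"; then
      echo "run task successfully"
      return 0
    fi
    echo "${what} failed ${i}-th time, retry later"
    sleep 2
  done
  echo "${what} failed at last"
  return 1
}
# Example (check_diff.sh is a hypothetical check script):
# retry_task "check diff" ./check_diff.sh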
table consistent_replicate_ddl.usertable3 not exists for 45-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G09 Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=6c0acff1-3bff-4089-a573-34231adfcd5d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-fw71j GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-fw71j pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/sink_retry Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table row_format.finish_mark not exists for 41-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table consistent_replicate_ddl.usertable3 not exists for 46-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
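The "./tests/integration_tests/run_group.sh mysql G09" invocation traced just above resolves the group ID to a fixed case list and runs each case in turn. A rough sketch of that dispatch, using the G09 case list the log prints (the real script in tiflow may structure this differently):

  #!/bin/bash
  # run_group.sh-style dispatcher: sink type first, group ID second.
  sink_type=$1   # e.g. mysql
  group=$2       # e.g. G09
  declare -A groups=(
      [G09]="gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status"
  )
  echo "Run cases: ${groups[$group]}"
  for tc in ${groups[$group]}; do
      bash "tests/integration_tests/${tc}/run.sh" "${sink_type}" || exit 1
  done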
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table row_format.finish_mark not exists for 42-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table consistent_replicate_ddl.usertable3 not exists for 47-th check, retry later [Pipeline] } Cache not saved (inner-step execution failed) cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 9-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table consistent_replicate_ddl.usertable3 not exists for 48-th check, retry later [Pipeline] withEnv [Pipeline] { table row_format.finish_mark not exists for 43-th check, retry later [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // cache [Pipeline] } [Pipeline] stage [Pipeline] { (Test) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] { table row_format.finish_mark not exists for 44-th check, retry later [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] { [Pipeline] timeout Timeout set to expire in 40 min [Pipeline] { start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... [Pipeline] // dir Post stage [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN table consistent_replicate_ddl.usertable3 not exists for 49-th check, retry later [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
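"Verifying upstream PD is started..." corresponds to polling PD on its client URL until it answers. One way to express that readiness check (the endpoint and timeout are assumptions, not necessarily what the harness helper uses):

  # Poll PD's HTTP API until it responds; give up after ~60s.
  pd_addr=http://127.0.0.1:2379
  for i in $(seq 1 60); do
      if curl -sf "${pd_addr}/pd/api/v1/version" >/dev/null; then
          echo "upstream PD is started"
          break
      fi
      sleep 1
  done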
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] sh [Pipeline] { [Pipeline] { + ls /tmp/tidb_cdc_test/ bdr_mode big_txn cov.bdr_mode.cli.2724.out cov.bdr_mode.cli.2761.out sql_res.bdr_mode.txt ++ find /tmp/tidb_cdc_test/ -type f -name '*.log' + tar -cvzf log-G00.tar.gz /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log /tmp/tidb_cdc_test/bdr_mode/tikv2.log /tmp/tidb_cdc_test/bdr_mode/stdout.log /tmp/tidb_cdc_test/bdr_mode/down_pd.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/error.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/proxy.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/server.log /tmp/tidb_cdc_test/bdr_mode/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/bdr_mode/pd1/region-meta/000001.log /tmp/tidb_cdc_test/bdr_mode/pd1/hot-region/000001.log /tmp/tidb_cdc_test/bdr_mode/tidb.log /tmp/tidb_cdc_test/bdr_mode/tikv2/db/000005.log /tmp/tidb_cdc_test/bdr_mode/pd1.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/bdr_mode/tidb_down.log /tmp/tidb_cdc_test/bdr_mode/cdc.log /tmp/tidb_cdc_test/bdr_mode/tikv1.log /tmp/tidb_cdc_test/bdr_mode/tikv_down/db/000005.log /tmp/tidb_cdc_test/bdr_mode/cdcdown.log /tmp/tidb_cdc_test/bdr_mode/tidb-slow.log /tmp/tidb_cdc_test/bdr_mode/tikv3.log /tmp/tidb_cdc_test/bdr_mode/sync_diff_inspector.log /tmp/tidb_cdc_test/bdr_mode/tikv1/db/000005.log /tmp/tidb_cdc_test/bdr_mode/stdoutdown.log /tmp/tidb_cdc_test/bdr_mode/tikv_down.log /tmp/tidb_cdc_test/bdr_mode/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/bdr_mode/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/bdr_mode/tidb_other.log /tmp/tidb_cdc_test/bdr_mode/tikv3/db/000005.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0002/000002.log tar: Removing leading `/' from member names /tmp/tidb_cdc_test/big_txn/sync_diff/output/sync_diff.log /tmp/tidb_cdc_test/bdr_mode/tikv2.log /tmp/tidb_cdc_test/bdr_mode/stdout.log /tmp/tidb_cdc_test/bdr_mode/down_pd.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/error.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/proxy.log /tmp/tidb_cdc_test/bdr_mode/tiflash/log/server.log /tmp/tidb_cdc_test/bdr_mode/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/bdr_mode/pd1/region-meta/000001.log /tmp/tidb_cdc_test/bdr_mode/pd1/hot-region/000001.log 
/tmp/tidb_cdc_test/bdr_mode/tidb.log /tmp/tidb_cdc_test/bdr_mode/tikv2/db/000005.log /tmp/tidb_cdc_test/bdr_mode/pd1.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_datadown/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/bdr_mode/tidb_down.log /tmp/tidb_cdc_test/bdr_mode/cdc.log /tmp/tidb_cdc_test/bdr_mode/tikv1.log ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] { ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] { [Pipeline] { /tmp/tidb_cdc_test/bdr_mode/tikv_down/db/000005.log /tmp/tidb_cdc_test/bdr_mode/cdcdown.log /tmp/tidb_cdc_test/bdr_mode/tidb-slow.log /tmp/tidb_cdc_test/bdr_mode/tikv3.log [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
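The repeated "ERROR 2003 (HY000)" lines are expected noise from the TiDB readiness probe: the harness keeps connecting with a MySQL client until the server is actually listening. Roughly (port 4000 is the upstream TiDB port these cases use; the downstream check is analogous):

  # Readiness probe: ERROR 2003 only means TiDB is not accepting connections yet.
  while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
      sleep 1
  done
  echo "Verifying Upstream TiDB is started... done"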
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] cache /tmp/tidb_cdc_test/bdr_mode/sync_diff_inspector.log /tmp/tidb_cdc_test/bdr_mode/tikv1/db/000005.log /tmp/tidb_cdc_test/bdr_mode/stdoutdown.log /tmp/tidb_cdc_test/bdr_mode/tikv_down.log /tmp/tidb_cdc_test/bdr_mode/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/bdr_mode/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/bdr_mode/tidb_other.log /tmp/tidb_cdc_test/bdr_mode/tikv3/db/000005.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/bdr_mode/cdc_data/tmp/sorter/0002/000002.log + ls -alh log-G00.tar.gz -rw-r--r--. 1 jenkins jenkins 4.4M May 6 14:36 log-G00.tar.gz ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark not exists for 45-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 50-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327805180001 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx, pid:1224, start at 2024-05-06 14:36:14.534674895 +0800 CST m=+5.167524425 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:14.541 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:14.534 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:14.534 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327805180001 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx, pid:1224, start at 2024-05-06 14:36:14.534674895 +0800 CST m=+5.167524425 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:14.541 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:14.534 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:14.534 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327806980006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-c4xqq-1lrjw-gh4qx, pid:1305, start at 2024-05-06 14:36:14.63635869 +0800 CST m=+5.219026078 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:14.646 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:14.630 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:14.630 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark not exists for 46-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table consistent_replicate_ddl.usertable3 not exists for 51-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
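The "arg matches is ArgMatches { ... }" blob above is the TiFlash proxy echoing its parsed flags for the sink_retry case. Reassembled into the command line those values imply (the binary name below is a placeholder; the harness actually drives this through its TiFlash startup helper):

  # TiFlash proxy flags as implied by the ArgMatches dump (sink_retry case).
  tiflash-proxy \
      --engine-addr 127.0.0.1:9500 \
      --advertise-addr 127.0.0.1:9000 \
      --addr 127.0.0.1:9000 \
      --data-dir /tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy \
      --config /tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml \
      --engine-git-hash 8e50de84e6d6ecdcc108990217b70b6bb3f50271 \
      --engine-version v8.2.0-alpha-17-g8e50de84e \
      --engine-label tiflash \
      --pd-endpoints 127.0.0.1:2379 \
      --log-file /tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log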
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2601.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark not exists for 47-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table consistent_replicate_ddl.usertable3 not exists for 52-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327838100003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg, pid:1177, start at 2024-05-06 14:36:17.798155114 +0800 CST m=+5.709432822 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.807 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:17.796 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:17.796 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327840800013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk, pid:1241, start at 2024-05-06 14:36:18.370089973 +0800 CST m=+5.331510384 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:18.378 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:18.336 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:18.336 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449571029774237697 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' 
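The "cdc.test ... cli tso query --pd=http://127.0.0.1:2379" call above pins the changefeed start point; the next traced step keeps only the first field of the output, dropping the "PASS coverage" line appended by the instrumented test binary. Condensed (the coverprofile path here is illustrative):

  # Ask PD for a current TSO through the cdc CLI and keep only the number.
  start_ts=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out \
      cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' 'NR==1 {print $1}')
  echo "start-ts=${start_ts}"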
+ echo 449571029774237697 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "scanproportion"="0" "operationcount"="0" "requestdistribution"="uniform" "insertproportion"="0" "workload"="core" "updateproportion"="0" "mysql.db"="sink_retry" "mysql.host"="127.0.0.1" "mysql.port"="4000" "recordcount"="10" "readproportion"="0" "mysql.user"="root" "threadcount"="2" "dotransactions"="false" "readallfields"="true" ********************************************** Run finished, takes 8.070136ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2187.2, Avg(us): 1518, Min(us): 893, Max(us): 3427, 95th(us): 4000, 99th(us): 4000 [Mon May 6 14:36:19 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.26592661.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32782b480013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w, pid:1226, start at 2024-05-06 14:36:17.008594088 +0800 CST m=+5.642358892 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.018 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:16.978 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:16.978 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32782b480013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w, pid:1226, start at 2024-05-06 14:36:17.008594088 +0800 CST m=+5.642358892 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.018 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:16.978 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:16.978 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32782d940013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-w1z4r-n67n2-dzr1w, pid:1297, start at 2024-05-06 14:36:17.156879925 +0800 CST m=+5.734688675 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.165 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:17.125 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:17.125 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
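The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps repeated above are rows from TiDB's mysql.tidb table, where the GC worker records its leader, lease, tikv_gc_life_time and tikv_gc_safe_point; the GC-related cases in this run (e.g. gc_safepoint) exercise how TiCDC interacts with that machinery. The rows can be inspected directly (the query shape is an assumption, not the case's exact check):

  # Dump the GC bookkeeping rows TiDB keeps in mysql.tidb.
  mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%'"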
Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table row_format.finish_mark not exists for 48-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32784ec00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:1286, start at 2024-05-06 14:36:19.284640061 +0800 CST m=+5.594520455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:19.292 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:19.298 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:19.298 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table consistent_replicate_ddl.usertable3 not exists for 53-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327838100003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg, pid:1177, start at 2024-05-06 14:36:17.798155114 +0800 CST m=+5.709432822 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.807 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:17.796 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:17.796 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327838bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-930xb-c01q7-6qbpg, pid:1252, start at 2024-05-06 14:36:17.877779344 +0800 CST m=+5.698936062 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:17.889 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:17.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:17.889 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
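The recurring "table <db>.<table> not exists for N-th check, retry later" lines come from a poll-until-visible helper in the test utilities: the case waits for a marker table to replicate downstream, retrying on an interval and failing after a bounded number of checks. A sketch of that idea (the helper name, interval, and limit are assumptions):

  # Wait until a marker table shows up downstream, or give up.
  check_table_exists() {
      local full_table=$1 host=$2 port=$3 max_checks=${4:-60}
      local db=${full_table%%.*} tbl=${full_table##*.}
      for ((i = 1; i <= max_checks; i++)); do
          if mysql -h "$host" -P "$port" -u root -N -e \
              "SELECT 1 FROM information_schema.tables WHERE table_schema='${db}' AND table_name='${tbl}'" | grep -q 1; then
              return 0
          fi
          echo "table ${full_table} not exists for ${i}-th check, retry later"
          sleep 3
      done
      return 1
  }
  check_table_exists row_format.finish_mark 127.0.0.1 3306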
Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Mon May 6 14:36:21 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.26072609.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327840800013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk, pid:1241, start at 2024-05-06 14:36:18.370089973 +0800 CST m=+5.331510384 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:18.378 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:18.336 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:18.336 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327840e00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-2fwdx-4vz8x-2lkxk, pid:1326, start at 2024-05-06 14:36:18.387891503 +0800 CST m=+5.297100570 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:18.395 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:18.360 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:18.360 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
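Both cdc server launches traced in this section follow the same recipe: export a GO_FAILPOINTS expression to inject the fault under test (MySQLSinkTxnRandomError=25%return(true) for sink_retry, kvClientForceReconnect=return(true) for kv_client_stream_reconnect), start cdc.test in server mode in the background, then curl http://127.0.0.1:8300/debug/info with basic auth up to 50 times until the response contains "etcd info". Condensed (the coverprofile name is shortened here):

  # Start a cdc server with a failpoint enabled and wait for it to register in etcd.
  export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)'
  cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.out server \
      --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug \
      --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data \
      --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &
  for i in $(seq 0 50); do
      if curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
          --user ticdc:ticdc_secret 2>/dev/null | grep -q 'etcd info'; then
          break
      fi
      sleep 3
  done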
Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32784ec00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:1286, start at 2024-05-06 14:36:19.284640061 +0800 CST m=+5.594520455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:19.292 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:19.298 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:19.298 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32784f74000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:1365, start at 2024-05-06 14:36:19.309453642 +0800 CST m=+5.528566522 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:19.318 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:19.293 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:19.293 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table consistent_replicate_ddl.usertable3 not exists for 54-th check, retry later table row_format.finish_mark not exists for 49-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:22 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d8231713-5d5a-4e9d-aa0c-543016f5d386 {"id":"d8231713-5d5a-4e9d-aa0c-543016f5d386","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ddf14e8 d8231713-5d5a-4e9d-aa0c-543016f5d386 /tidb/cdc/default/default/upstream/7365771702868746877 {"id":7365771702868746877,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d8231713-5d5a-4e9d-aa0c-543016f5d386 {"id":"d8231713-5d5a-4e9d-aa0c-543016f5d386","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ddf14e8 d8231713-5d5a-4e9d-aa0c-543016f5d386 /tidb/cdc/default/default/upstream/7365771702868746877 {"id":7365771702868746877,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d8231713-5d5a-4e9d-aa0c-543016f5d386 {"id":"d8231713-5d5a-4e9d-aa0c-543016f5d386","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9ddf14e8 d8231713-5d5a-4e9d-aa0c-543016f5d386 /tidb/cdc/default/default/upstream/7365771702868746877 {"id":7365771702868746877,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2717.out cli changefeed create --start-ts=449571029774237697 '--sink-uri=mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1' Create changefeed successfully! 
ID: 576043ba-eaeb-42c5-afe8-cf4022ac785e Info: {"upstream_id":7365771702868746877,"namespace":"default","id":"576043ba-eaeb-42c5-afe8-cf4022ac785e","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-06T14:36:23.210035915+08:00","start_ts":449571029774237697,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571029774237697,"checkpoint_ts":449571029774237697,"checkpoint_time":"2024-05-06 14:36:17.984"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32787a800008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl, pid:1289, start at 2024-05-06 14:36:22.051885978 +0800 CST m=+5.310480789 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:38:22.058 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:36:22.048 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:26:22.048 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2621.out cli tso query --pd=http://127.0.0.1:2379 table row_format.finish_mark not exists for 50-th check, retry later [Mon May 6 14:36:24 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.25992601.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table consistent_replicate_ddl.usertable3 not exists for 55-th check, retry later [Mon May 6 14:36:24 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.27562758.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:24 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bb8839b0-64a7-47a3-9274-0aa9712412d0 {"id":"bb8839b0-64a7-47a3-9274-0aa9712412d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977382} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de101cf bb8839b0-64a7-47a3-9274-0aa9712412d0 /tidb/cdc/default/default/upstream/7365771709775138696 {"id":7365771709775138696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bb8839b0-64a7-47a3-9274-0aa9712412d0 {"id":"bb8839b0-64a7-47a3-9274-0aa9712412d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977382} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de101cf bb8839b0-64a7-47a3-9274-0aa9712412d0 /tidb/cdc/default/default/upstream/7365771709775138696 {"id":7365771709775138696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bb8839b0-64a7-47a3-9274-0aa9712412d0 {"id":"bb8839b0-64a7-47a3-9274-0aa9712412d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977382} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de101cf bb8839b0-64a7-47a3-9274-0aa9712412d0 /tidb/cdc/default/default/upstream/7365771709775138696 {"id":7365771709775138696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32787a800008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl, pid:1289, start at 2024-05-06 14:36:22.051885978 +0800 CST m=+5.310480789 Host name and pid of current GC leader. 
(DO NOT EDIT)
tikv_gc_leader_lease 20240506-14:38:22.058 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240506-14:36:22.048 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240506-14:26:22.048 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63d32787ccc0002 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rqn19-d4crb-tm2xl, pid:1375, start at 2024-05-06 14:36:22.196750047 +0800 CST m=+5.370135846 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240506-14:38:22.202 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240506-14:36:22.195 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240506-14:26:22.195 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-17-g8e50de84e
Edition: Community
Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271
Git Branch: HEAD
UTC Build Time: 2024-05-06 04:04:42
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile: RELWITHDEBINFO
Compiler: clang++ 13.0.0

Raft Proxy
Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time: 2024-05-06 04:09:34
Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine: tiflash
Prometheus Prefix: tiflash_proxy_
Profile: release
Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
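The '+ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info ...' traces that recur below are the CDC server readiness loop: the script polls the capture's /debug/info endpoint up to 50 times, treats a response containing "etcd info" as success, and sleeps 3 seconds between attempts. A condensed Bash sketch of that loop, reusing the endpoint and credentials shown in the traces (the function wrapper itself is illustrative):

    # Condensed form of the readiness loop traced in this log.
    wait_cdc_ready() {
        local url='http://127.0.0.1:8300/debug/info' res
        for ((i = 0; i <= 50; i++)); do
            res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>&1)
            if echo "$res" | grep -q 'etcd info'; then
                return 0    # capture registered itself in etcd: server is up
            fi
            if [ "$i" -eq 50 ]; then
                echo 'cdc server failed to start in time'
                return 1
            fi
            sleep 3
        done
    }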
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449571031268982785 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571031268982785 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Mon May 6 14:36:25 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.26622664.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table row_format.finish_mark not exists for 51-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 56-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:27 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/958c5763-4f48-4b3a-b202-929756bdf6cb {"id":"958c5763-4f48-4b3a-b202-929756bdf6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977384} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de81dcd 958c5763-4f48-4b3a-b202-929756bdf6cb /tidb/cdc/default/default/upstream/7365771719684474214 {"id":7365771719684474214,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/958c5763-4f48-4b3a-b202-929756bdf6cb {"id":"958c5763-4f48-4b3a-b202-929756bdf6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977384} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de81dcd 958c5763-4f48-4b3a-b202-929756bdf6cb /tidb/cdc/default/default/upstream/7365771719684474214 {"id":7365771719684474214,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/958c5763-4f48-4b3a-b202-929756bdf6cb {"id":"958c5763-4f48-4b3a-b202-929756bdf6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977384} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de81dcd 958c5763-4f48-4b3a-b202-929756bdf6cb /tidb/cdc/default/default/upstream/7365771719684474214 {"id":7365771719684474214,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 10-th time, retry later 0 [Mon May 6 14:36:27 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.26652667.out server --log-file 
/tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:27 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f1b1ed75-4f13-4719-9e55-26a07c1d95ec {"id":"f1b1ed75-4f13-4719-9e55-26a07c1d95ec","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de5c366 f1b1ed75-4f13-4719-9e55-26a07c1d95ec /tidb/cdc/default/default/upstream/7365771709734059805 {"id":7365771709734059805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f1b1ed75-4f13-4719-9e55-26a07c1d95ec {"id":"f1b1ed75-4f13-4719-9e55-26a07c1d95ec","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de5c366 f1b1ed75-4f13-4719-9e55-26a07c1d95ec /tidb/cdc/default/default/upstream/7365771709734059805 {"id":7365771709734059805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f1b1ed75-4f13-4719-9e55-26a07c1d95ec {"id":"f1b1ed75-4f13-4719-9e55-26a07c1d95ec","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de5c366 f1b1ed75-4f13-4719-9e55-26a07c1d95ec /tidb/cdc/default/default/upstream/7365771709734059805 
{"id":7365771709734059805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table row_format.finish_mark not exists for 52-th check, retry later check_changefeed_state http://127.0.0.1:2379 1106e379-4bee-4f40-8cae-e7fb45d9179f normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=1106e379-4bee-4f40-8cae-e7fb45d9179f + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 1106e379-4bee-4f40-8cae-e7fb45d9179f -s table consistent_replicate_ddl.usertable3 not exists for 57-th check, retry later + info='{ "upstream_id": 7365771709734059805, "namespace": "default", "id": "1106e379-4bee-4f40-8cae-e7fb45d9179f", "state": "normal", "checkpoint_tso": 449571032411930625, "checkpoint_time": "2024-05-06 14:36:28.046", "error": null }' + echo '{ "upstream_id": 7365771709734059805, "namespace": "default", "id": "1106e379-4bee-4f40-8cae-e7fb45d9179f", "state": "normal", "checkpoint_tso": 449571032411930625, "checkpoint_time": "2024-05-06 14:36:28.046", "error": null }' { "upstream_id": 7365771709734059805, "namespace": "default", "id": "1106e379-4bee-4f40-8cae-e7fb45d9179f", "state": "normal", "checkpoint_tso": 449571032411930625, "checkpoint_time": "2024-05-06 14:36:28.046", "error": null } ++ echo '{' '"upstream_id":' 7365771709734059805, '"namespace":' '"default",' '"id":' '"1106e379-4bee-4f40-8cae-e7fb45d9179f",' '"state":' '"normal",' '"checkpoint_tso":' 449571032411930625, '"checkpoint_time":' '"2024-05-06' '14:36:28.046",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365771709734059805, '"namespace":' '"default",' '"id":' '"1106e379-4bee-4f40-8cae-e7fb45d9179f",' '"state":' '"normal",' '"checkpoint_tso":' 449571032411930625, '"checkpoint_time":' '"2024-05-06' '14:36:28.046",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:28 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 {"id":"9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de391d8 9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 /tidb/cdc/default/default/upstream/7365771709656379915 {"id":7365771709656379915,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 {"id":"9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de391d8 9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 /tidb/cdc/default/default/upstream/7365771709656379915 {"id":7365771709656379915,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 {"id":"9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977385} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9de391d8 9ddb2af3-775a-49e5-a9f8-19c0cbfc0ca8 /tidb/cdc/default/default/upstream/7365771709656379915 {"id":7365771709656379915,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2721.out cli changefeed create --start-ts=449571031268982785 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ Create changefeed successfully! 
ID: 1918eac2-9afe-4dc0-8d1f-f5cefd7de92a Info: {"upstream_id":7365771709656379915,"namespace":"default","id":"1918eac2-9afe-4dc0-8d1f-f5cefd7de92a","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:36:28.691034618+08:00","start_ts":449571031268982785,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571031268982785,"checkpoint_ts":449571031268982785,"checkpoint_time":"2024-05-06 14:36:23.686"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 1-th time, retry later table row_format.finish_mark not exists for 53-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > + set +x check diff failed 2-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 58-th check, retry later < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:36:30 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4a509e25-f1ee-4b63-b1f9-2041603de9e4 {"id":"4a509e25-f1ee-4b63-b1f9-2041603de9e4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977387} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9df9e3cd 4a509e25-f1ee-4b63-b1f9-2041603de9e4 /tidb/cdc/default/default/upstream/7365771733546575005 {"id":7365771733546575005,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4a509e25-f1ee-4b63-b1f9-2041603de9e4 {"id":"4a509e25-f1ee-4b63-b1f9-2041603de9e4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977387} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9df9e3cd 4a509e25-f1ee-4b63-b1f9-2041603de9e4 /tidb/cdc/default/default/upstream/7365771733546575005 {"id":7365771733546575005,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4a509e25-f1ee-4b63-b1f9-2041603de9e4 {"id":"4a509e25-f1ee-4b63-b1f9-2041603de9e4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977387} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9df9e3cd 4a509e25-f1ee-4b63-b1f9-2041603de9e4 /tidb/cdc/default/default/upstream/7365771733546575005 {"id":7365771733546575005,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: acebb965-6142-4438-b23e-8a260fc22458 Info: {"upstream_id":7365771733546575005,"namespace":"default","id":"acebb965-6142-4438-b23e-8a260fc22458","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:36:30.482926133+08:00","start_ts":449571032176525313,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571032176525313,"checkpoint_ts":449571032176525313,"checkpoint_time":"2024-05-06 14:36:27.148"} check diff failed 2-th time, retry later table foreign_key.finish_mark not exists for 1-th check, retry later check diff successfully check_safepoint_forward http://127.0.0.1:2379 7365771719684474214 449571033133350912 449571032242061316 table consistent_replicate_ddl.usertable3 not exists for 59-th check, retry later table row_format.finish_mark not exists for 54-th check, retry later run task successfully check_changefeed_state http://127.0.0.1:2379 a0f072ad-0d3d-42e4-855d-2021084d8792 stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=a0f072ad-0d3d-42e4-855d-2021084d8792 + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c a0f072ad-0d3d-42e4-855d-2021084d8792 -s check diff failed 3-th time, retry later + info='{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' + echo '{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' { "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null } ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' 
'"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"stopped",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"stopped",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365771719684474214 table row_format.finish_mark not exists for 55-th check, retry later table foreign_key.finish_mark not exists for 2-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 60-th check, retry later check diff failed 4-th time, retry later table row_format.finish_mark not exists for 56-th check, retry later table foreign_key.finish_mark not exists for 3-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 61-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 20.33 secs (183482707 bytes/sec) [Pipeline] { [Pipeline] cache run task successfully check_changefeed_state http://127.0.0.1:2379 a0f072ad-0d3d-42e4-855d-2021084d8792 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=a0f072ad-0d3d-42e4-855d-2021084d8792 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c a0f072ad-0d3d-42e4-855d-2021084d8792 -s + info='{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "normal", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' + echo '{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "normal", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' { "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "normal", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null } ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"normal",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"normal",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7365771719684474214 449571033657638912 449571033657638913 check diff failed 5-th time, retry later table row_format.finish_mark not exists for 57-th check, retry later run task successfully table foreign_key.finish_mark not exists for 4-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 62-th check, retry later check_changefeed_state http://127.0.0.1:2379 a0f072ad-0d3d-42e4-855d-2021084d8792 stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=a0f072ad-0d3d-42e4-855d-2021084d8792 + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c a0f072ad-0d3d-42e4-855d-2021084d8792 -s + info='{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' + echo '{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null }' { "upstream_id": 7365771719684474214, "namespace": "default", "id": "a0f072ad-0d3d-42e4-855d-2021084d8792", "state": "stopped", "checkpoint_tso": 449571033657638913, "checkpoint_time": "2024-05-06 14:36:32.798", "error": null } ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"stopped",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"a0f072ad-0d3d-42e4-855d-2021084d8792",' '"state":' '"stopped",' '"checkpoint_tso":' 449571033657638913, '"checkpoint_time":' '"2024-05-06' '14:36:32.798",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_changefeed_state http://127.0.0.1:2379 1b2d536d-aa8f-4b1a-9461-b8c8423d53cf normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=1b2d536d-aa8f-4b1a-9461-b8c8423d53cf + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 1b2d536d-aa8f-4b1a-9461-b8c8423d53cf -s table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 not exists for 1-th check, retry later + info='{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "1b2d536d-aa8f-4b1a-9461-b8c8423d53cf", "state": "normal", "checkpoint_tso": 449571035243347972, "checkpoint_time": "2024-05-06 14:36:38.847", "error": null }' + echo '{ "upstream_id": 7365771719684474214, "namespace": "default", "id": "1b2d536d-aa8f-4b1a-9461-b8c8423d53cf", "state": "normal", "checkpoint_tso": 449571035243347972, "checkpoint_time": "2024-05-06 14:36:38.847", "error": null }' { "upstream_id": 7365771719684474214, "namespace": "default", "id": "1b2d536d-aa8f-4b1a-9461-b8c8423d53cf", "state": "normal", "checkpoint_tso": 449571035243347972, "checkpoint_time": "2024-05-06 14:36:38.847", "error": null } ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"1b2d536d-aa8f-4b1a-9461-b8c8423d53cf",' '"state":' '"normal",' '"checkpoint_tso":' 449571035243347972, '"checkpoint_time":' '"2024-05-06' '14:36:38.847",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365771719684474214, '"namespace":' '"default",' '"id":' '"1b2d536d-aa8f-4b1a-9461-b8c8423d53cf",' '"state":' '"normal",' '"checkpoint_tso":' 449571035243347972, '"checkpoint_time":' '"2024-05-06' '14:36:38.847",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365771719684474214 check diff failed 6-th time, retry later table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later table row_format.finish_mark not exists for 58-th check, retry later table foreign_key.finish_mark not exists for 5-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 63-th check, retry later check diff failed 7-th time, retry later table test.finish_mark not exists for 2-th check, retry later table row_format.finish_mark not exists for 59-th check, retry later table foreign_key.finish_mark not exists for 6-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 64-th check, retry later run task successfully Changefeed remove successfully. ID: a0f072ad-0d3d-42e4-855d-2021084d8792 CheckpointTs: 449571033657638913 SinkURI: mysql://normal:xxxxx@127.0.0.1:3306/?max-txn-row=1 check_safepoint_forward http://127.0.0.1:2379 7365771719684474214 449571035243347971 449571035243347972 449571033657638913 run task successfully check diff failed 8-th time, retry later table test.finish_mark not exists for 3-th check, retry later Changefeed remove successfully. 
ID: 1b2d536d-aa8f-4b1a-9461-b8c8423d53cf
CheckpointTs: 449571036279078914
SinkURI: mysql://normal:xxxxx@127.0.0.1:3306/?max-txn-row=1
check_safepoint_cleared http://127.0.0.1:2379 7365771719684474214
run task successfully
table row_format.finish_mark not exists for 60-th check, retry later
table foreign_key.finish_mark not exists for 7-th check, retry later
table consistent_replicate_ddl.usertable3 not exists for 65-th check, retry later
wait process cdc.test exit for 1-th time...
cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true'
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Mon May 6 14:36:45 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>>
run task failed 11-th time, retry later
table test.finish_mark not exists for 4-th check, retry later
check diff failed 9-th time, retry later
table consistent_replicate_ddl.usertable3 not exists for 66-th check, retry later
table row_format.finish_mark not exists at last check
table foreign_key.finish_mark not exists for 8-th check, retry later
table test.finish_mark not exists for 5-th check, retry later
check diff failed 10-th time, retry later
table foreign_key.finish_mark not exists for 9-th check, retry later
table consistent_replicate_ddl.usertable3 not exists for 67-th check, retry later
table foreign_key.finish_mark not exists for 10-th check, retry later
table test.finish_mark not exists for 6-th check, retry later
check diff failed 11-th time, retry later
table consistent_replicate_ddl.usertable3 not exists for 68-th check, retry later
table foreign_key.finish_mark not exists for 11-th check, retry later
check diff failed 12-th time, retry later
table consistent_replicate_ddl.usertable3 not exists for 69-th check, retry later
table test.finish_mark not exists for 7-th check, retry later
table consistent_replicate_ddl.usertable3 not exists for 70-th check, retry later
table foreign_key.finish_mark not exists for 12-th check, retry later
table test.finish_mark not exists for 8-th check, retry later
check diff failed 13-th time, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: mysql... <<=================
The 1 times to try to start tidb cluster...
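The check_changefeed_state traces earlier in this section (for changefeeds 1106e379-..., a0f072ad-..., and 1b2d536d-...) all follow the same shape: query the changefeed with 'cdc cli changefeed query --pd=... -c <id> -s', then compare the .state and .error.message fields with jq. A stripped-down Bash sketch of that check, reusing the PD endpoint from the log (argument handling is simplified relative to the real helper):

    # Simplified version of the state check traced above.
    check_changefeed_state() {
        local pd=$1 changefeed_id=$2 expected_state=$3 info state message
        info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
        state=$(echo "$info" | jq -r .state)
        message=$(echo "$info" | jq -r .error.message)
        if [[ "$state" != "$expected_state" ]]; then
            echo "expected state $expected_state, got $state (error: $message)"
            return 1
        fi
    }

    # e.g. check_changefeed_state http://127.0.0.1:2379 a0f072ad-0d3d-42e4-855d-2021084d8792 normal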
table consistent_replicate_ddl.usertable3 not exists for 71-th check, retry later table foreign_key.finish_mark not exists for 13-th check, retry later table test.finish_mark not exists for 9-th check, retry later check diff failed 14-th time, retry later table sink_retry.finish_mark_1 exists check diff successfully ***************** properties ***************** "mysql.db"="sink_retry" "requestdistribution"="uniform" "dotransactions"="false" "workload"="core" "mysql.port"="4000" "threadcount"="2" "insertproportion"="0" "updateproportion"="0" "recordcount"="10" "readproportion"="0" "mysql.host"="127.0.0.1" "readallfields"="true" "scanproportion"="0" "operationcount"="0" "mysql.user"="root" ********************************************** Run finished, takes 4.319982ms INSERT - Takes(s): 0.0, Count: 10, OPS: 3215.7, Avg(us): 767, Min(us): 484, Max(us): 1833, 95th(us): 2000, 99th(us): 2000 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 17.52 secs (212905902 bytes/sec) [Pipeline] { [Pipeline] cache table foreign_key.finish_mark not exists for 14-th check, retry later table test.finish_mark not exists for 10-th check, retry later check diff failed 15-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 72-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... table foreign_key.finish_mark not exists for 15-th check, retry later table test.finish_mark not exists for 11-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table consistent_replicate_ddl.usertable3 not exists for 73-th check, retry later check diff failed 16-th time, retry later table foreign_key.finish_mark not exists for 16-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table consistent_replicate_ddl.usertable3 not exists for 74-th check, retry later table test.finish_mark not exists for 12-th check, retry later check diff failed 17-th time, retry later check diff failed 1-th time, retry later check diff failed 2-th time, retry later table foreign_key.finish_mark not exists for 17-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 75-th check, retry later table test.finish_mark not exists for 13-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 18-th time, retry later check diff failed 3-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 76-th check, retry later table foreign_key.finish_mark not exists for 18-th check, retry later table test.finish_mark not exists for 14-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 19-th time, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' check diff failed 4-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 77-th check, retry later run task failed 12-th time, retry later table foreign_key.finish_mark not exists for 19-th check, retry later table test.finish_mark not exists for 15-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327b46ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:4485, start at 2024-05-06 14:37:07.914572269 +0800 CST m=+5.491880678 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:39:07.924 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:37:07.883 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:27:07.883 +0800 All versions after safe point can be accessed. 
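The "ERROR 2003 (HY000): Can't connect to MySQL server" lines above are expected while "Verifying Upstream TiDB is started...": the harness keeps reconnecting until tidb-server is listening. A minimal sketch of such a readiness wait, assuming the mysql client and the upstream port 4000; the retry budget is illustrative:

# Keep retrying a trivial query until TiDB accepts connections (sketch only).
i=0
until mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
    i=$((i + 1))
    if [ "$i" -ge 60 ]; then
        echo 'Upstream TiDB failed to start in time' >&2
        exit 1
    fi
    sleep 1
done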
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 20-th time, retry later check diff failed 5-th time, retry later table foreign_key.finish_mark not exists for 20-th check, retry later table test.finish_mark not exists for 16-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 11.33 secs (329324561 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327b46ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:4485, start at 2024-05-06 14:37:07.914572269 +0800 CST m=+5.491880678 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:39:07.924 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:37:07.883 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:27:07.883 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d327b4824000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-fw71j-f959l-fmkc9, pid:4566, start at 2024-05-06 14:37:07.991536484 +0800 CST m=+5.497259620 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:39:07.999 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:37:07.977 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:27:07.977 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
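The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tidb_server_version, the tikv_gc_* GC-worker settings, and so on) are rows of the mysql.tidb system table, printed once the upstream and downstream TiDB instances start answering queries. A query like the following reproduces that output; the host and port are assumptions for the upstream instance:

# Dump the TiDB bootstrap / GC-worker variables shown above.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'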
TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table consistent_replicate_ddl.usertable3 not exists for 78-th check, retry later check diff failed 21-th time, retry later table foreign_key.finish_mark not exists for 21-th check, retry later check diff failed 6-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 79-th check, retry later table test.finish_mark not exists for 17-th check, retry later [Mon May 6 14:37:12 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.58645866.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 22-th time, retry later check diff failed 7-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 80-th check, retry later table foreign_key.finish_mark not exists for 22-th check, retry later table test.finish_mark not exists for 18-th check, retry later check diff failed 23-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:37:15 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6f77e9a3-c436-451e-b13d-ce932e4a70d7 {"id":"6f77e9a3-c436-451e-b13d-ce932e4a70d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977433} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9eae4acd 6f77e9a3-c436-451e-b13d-ce932e4a70d7 /tidb/cdc/default/default/upstream/7365771930347772107 {"id":7365771930347772107,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6f77e9a3-c436-451e-b13d-ce932e4a70d7 {"id":"6f77e9a3-c436-451e-b13d-ce932e4a70d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977433} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9eae4acd 6f77e9a3-c436-451e-b13d-ce932e4a70d7 /tidb/cdc/default/default/upstream/7365771930347772107 {"id":7365771930347772107,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6f77e9a3-c436-451e-b13d-ce932e4a70d7 {"id":"6f77e9a3-c436-451e-b13d-ce932e4a70d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977433} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4c9eae4acd 6f77e9a3-c436-451e-b13d-ce932e4a70d7 /tidb/cdc/default/default/upstream/7365771930347772107 
{"id":7365771930347772107,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check diff failed 8-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 81-th check, retry later table changefeed_pause_resume.t1 not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 23-th check, retry later table test.finish_mark not exists for 19-th check, retry later check diff failed 24-th time, retry later check diff failed 9-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 82-th check, retry later table changefeed_pause_resume.t1 exists table changefeed_pause_resume.t2 not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 24-th check, retry later table test.finish_mark not exists for 20-th check, retry later check diff failed 25-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 83-th check, retry later table foreign_key.finish_mark not exists for 25-th check, retry later table test.finish_mark not exists for 21-th check, retry later check diff failed 10-th time, retry later table changefeed_pause_resume.t2 exists table changefeed_pause_resume.t3 not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 26-th check, retry later check diff failed 26-th time, retry later check diff failed 11-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 84-th check, retry later table changefeed_pause_resume.t3 exists table test.finish_mark not exists for 22-th check, retry later check diff failed 1-th time, retry later check diff failed 27-th time, retry later check diff failed 12-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 85-th check, retry later table foreign_key.finish_mark not exists for 27-th check, retry later table test.finish_mark not exists for 23-th check, retry later check diff failed 28-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 86-th check, retry later table foreign_key.finish_mark not exists for 28-th check, retry later check diff failed 2-th time, retry later table test.finish_mark not exists for 24-th check, retry later check diff failed 13-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 87-th check, retry later table foreign_key.finish_mark not exists for 29-th check, retry later check diff failed 3-th time, retry later table test.finish_mark not exists for 25-th check, retry later check diff failed 29-th time, retry later check diff failed 14-th time, retry later table sink_retry.finish_mark_2 not exists for 1-th check, retry later table sink_retry.finish_mark_2 not exists for 2-th check, retry later table sink_retry.finish_mark_2 not exists for 3-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 88-th check, retry later table foreign_key.finish_mark not exists for 30-th check, retry later check diff failed 4-th time, retry later table test.finish_mark not exists for 26-th check, retry later check diff failed 30-th time, retry later check diff failed 15-th time, retry later table sink_retry.finish_mark_2 not exists for 4-th check, retry later table foreign_key.finish_mark not exists for 31-th check, retry later check diff failed at last A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t8`` ... 
equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... 
Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... 
_____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t1`` ... 
failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t6` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t1`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t7`` ... 
failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t3`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... 
_____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... 
_____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t1`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t2` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t7`` ... 
[sync_diff progress output] The table structures of `processor_err_chan`.`t1` through `t10` are all reported as equivalent; the data comparison of every one of the 10 tables then ends in failure.
The data of `processor_err_chan`.`t1` through `t10` is not equal. The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
[sync_diff progress output] The next run starts: the structures of t8, t9, t7, t5, t6 and t4 are checked and found equivalent, and their data comparisons begin.
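The patch directory and log file named above live on the test agent under /tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/. A minimal sketch of how one might inspect them from a shell on the agent, assuming only the paths printed in this run (the file names inside fix-on-tidb0/ are not shown in this log, so they are listed rather than assumed):
# Paths are taken from the sync_diff output above.
OUT=/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output
ls -l "$OUT/fix-on-tidb0/"                 # generated fix-SQL patch files, if any
tail -n 100 "$OUT/sync_diff.log"           # end of the comparison log
grep -n "not equal" "$OUT/sync_diff.log"   # which tables sync_diff flagged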
[sync_diff progress output] That run continues: the structures of t3, t2, t1 and t10 are also equivalent, and one by one the data comparisons of all 10 tables end in failure.
The data of `processor_err_chan`.`t1` through `t10` is again reported as not equal: 10 tables compared, 0 finished, 10 failed, 0 skipped, with the same patch directory and sync_diff.log paths printed as above.
A total of 10 tables need to be compared
[sync_diff progress output] Another run starts: the structures of t8, t6, t7 and t5 are checked and found equivalent, and their data comparisons begin.
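These back-to-back runs are consistent with a retry wrapper around sync_diff: the tiflow integration tests normally poll the comparison until it succeeds or a retry budget is exhausted (an assumption here; the wrapper script itself is not visible in this part of the log). A minimal sketch of such a loop, with a hypothetical check_result helper standing in for one sync_diff invocation:
# Hedged sketch of a retry loop like the one that appears to drive these runs.
# check_result is a hypothetical stand-in for "run sync_diff once and return 0
# on success"; the real helper and its retry limit are not shown in this log.
i=0
while [ "$i" -lt 60 ]; do
    if check_result; then
        echo "sync diff successful"
        exit 0
    fi
    i=$((i + 1))
    sleep 2
done
echo "sync diff failed after $i attempts"
exit 1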
[sync_diff progress output] The run continues: the structures of t9, t4, t3, t2, t1 and t10 are equivalent as well, and the data comparisons of all 10 tables fail.
All 10 tables are again reported as not equal (0 finished, 10 failed, 0 skipped), with the same patch directory and sync_diff.log paths.
A total of 10 tables need to be compared
[sync_diff progress output] A further run starts: the structure of t8 is checked and found equivalent, and its data comparison begins.
[sync_diff progress output] The run continues: the structures of t7, t6, t5, t9, t4, t2, t1, t3 and t10 are equivalent, and the data comparisons of all 10 tables end in failure.
All 10 tables are again not equal (0 finished, 10 failed, 0 skipped), with the same patch directory and sync_diff.log paths.
A total of 10 tables need to be compared
[sync_diff progress output] The next run starts: the structures of t9, t7, t6, t8, t5 and t4 are checked and found equivalent, and their data comparisons begin.
[sync_diff progress output] The run continues: the structures of t10, t2, t3 and t1 are equivalent, and the data comparisons of all 10 tables fail.
All 10 tables are again not equal (0 finished, 10 failed, 0 skipped), with the same patch directory and sync_diff.log paths.
A total of 10 tables need to be compared
[sync_diff progress output] Another run starts: the structures of t7, t8, t5 and t6 are checked and found equivalent, and their data comparisons begin.
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal (all 10 tables)
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
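Each failed round above is one invocation of sync_diff_inspector; the test harness keeps re-running it until the upstream and downstream data converge or the retry budget runs out. Below is a minimal bash sketch of such a retry wrapper. The sync_diff_inspector binary and its --config flag are real, but WORK_DIR, the config path, the retry count, and the overall helper structure are illustrative assumptions, not the actual helper script used by these tests.

  #!/usr/bin/env bash
  # Illustrative retry wrapper; WORK_DIR and the config path are assumptions.
  WORK_DIR=/tmp/tidb_cdc_test/processor_err_chan
  CHECK_TIME=${CHECK_TIME:-60}
  for ((i = 1; i <= CHECK_TIME; i++)); do
      rm -rf "$WORK_DIR/sync_diff/output"    # each attempt writes a fresh output dir
      if sync_diff_inspector --config="$WORK_DIR/sync_diff/conf/diff_config.toml"; then
          echo "upstream and downstream are consistent after $i attempt(s)"
          exit 0
      fi
      sleep 2                                # give TiCDC time to replicate before retrying
  done
  echo "still inconsistent after $CHECK_TIME attempts, see $WORK_DIR/sync_diff/output/sync_diff.log"
  exit 1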
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal (all 10 tables)
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal (all 10 tables)
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal (all 10 tables)
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal (all 10 tables)
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
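When a round fails, sync_diff_inspector leaves two artifacts behind, both named in the summary above: per-table patch SQL under fix-on-tidb0/ and a detailed sync_diff.log. A quick way to inspect them from the agent is sketched below; the paths are taken from the log, but the exact file names under fix-on-tidb0/ depend on the run.

  # Inspect the artifacts of the latest failed attempt (paths from the log above).
  OUTPUT_DIR=/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output
  ls -l "$OUTPUT_DIR/fix-on-tidb0/"                       # patch SQL for rows that differ between upstream and downstream
  grep -n 'not equal' "$OUTPUT_DIR/sync_diff.log" | tail  # tables/chunks that failed in the latest attempt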
[terminal progress-bar redraws from sync_diff condensed]
Comparing the table structure of `processor_err_chan`.`t1`..`t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1`..`t10` ... failure
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t1` .. `t10` is not equal (all 10 tables)
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparison details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'

A total of 10 tables need to be compared
[next sync_diff attempt begins: table structures of `processor_err_chan`.`t8`, `t5`, `t9`, `t7` ... equivalent, data comparisons started]
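(Not part of the tool output above: given the artifact paths sync_diff prints, a quick way to inspect a failed comparison from the agent's shell might look like the hedged sketch below. Only the two paths come from the log; the commands themselves are ordinary shell and are offered as an example, not as part of the pipeline.)

    # hedged sketch (not from the log): inspect the sync_diff artifacts referenced above
    tail -n 50 /tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log   # comparison details
    ls /tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/           # generated fix-SQL patch files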
_____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t10`` ... 
equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t6`` ... 
failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t1`` ... failure Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t10`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t3` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... 
_____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t9`` ... 
failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t1`` ... 
failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t10` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t1`` ... 
failure Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t2`` ... failure Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t10`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t4`` ... 
equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t2`` ... 
failure Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t1`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t4`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t5` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t3` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' A total of 10 tables need to be compared Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t7`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``processor_err_chan`.`t9`` ... 
equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table structure of ``processor_err_chan`.`t8`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t6`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t5`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... 
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
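The excerpt above is one complete sync_diff_inspector pass: every table structure in `processor_err_chan` matches, but the data does not, so the comparison is simply run again while the changefeed catches up. As a rough illustration of that poll-until-consistent pattern (not the actual tiflow test helper), the sketch below wraps a diff command in a retry loop. The binary name, the --config flag, the config path, the retry budget, and the assumption that the tool exits non-zero while the tables still differ are all illustrative.

```go
package main

import (
	"fmt"
	"os/exec"
	"time"
)

// runDiff performs a single comparison by shelling out to a diff tool.
// The binary name and flag are assumptions for illustration; the only
// contract used here is "exit code 0 means the tables are identical".
func runDiff(binary, configPath string) bool {
	cmd := exec.Command(binary, "--config="+configPath)
	return cmd.Run() == nil
}

// waitForConsistency retries the comparison until it passes or the retry
// budget runs out, which is the behaviour reflected by the repeated runs
// in this log.
func waitForConsistency(binary, configPath string, attempts int, interval time.Duration) error {
	for i := 1; i <= attempts; i++ {
		if runDiff(binary, configPath) {
			fmt.Printf("tables consistent after %d attempt(s)\n", i)
			return nil
		}
		fmt.Printf("attempt %d: tables still differ, retrying in %v\n", i, interval)
		time.Sleep(interval)
	}
	return fmt.Errorf("tables still differ after %d attempts", attempts)
}

func main() {
	// Hypothetical binary/path/values; they are not taken from this log.
	if err := waitForConsistency("sync_diff_inspector", "./diff_config.toml", 30, 3*time.Second); err != nil {
		fmt.Println(err)
	}
}
```

Under a loop like this, the repeated "10 tables failed" summaries only mean that no passing run has been observed yet.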
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
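Each failed pass also leaves two artifacts worth knowing about: the fix-SQL patch directory ('fix-on-tidb0') and the detailed per-chunk log at sync_diff.log, both under /tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/. When a diff keeps failing like this, a quick manual cross-check is to compare row counts on the two MySQL-protocol endpoints; the sketch below does that with database/sql and the go-sql-driver/mysql driver. The DSNs, ports, and credentials are assumptions for illustration, and equal row counts are only a coarse signal (sync_diff compares the actual data, not counts).

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql" // MySQL-protocol driver, also used for TiDB
)

// countRows returns the row count of one table on one endpoint.
func countRows(db *sql.DB, table string) (int64, error) {
	var n int64
	err := db.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM processor_err_chan.%s", table)).Scan(&n)
	return n, err
}

func main() {
	// Hypothetical DSNs; the real ports/credentials come from the test
	// environment, not from this log excerpt.
	up, err := sql.Open("mysql", "root@tcp(127.0.0.1:4000)/")
	if err != nil {
		log.Fatal(err)
	}
	defer up.Close()
	down, err := sql.Open("mysql", "root@tcp(127.0.0.1:3306)/")
	if err != nil {
		log.Fatal(err)
	}
	defer down.Close()

	for i := 1; i <= 10; i++ {
		table := fmt.Sprintf("t%d", i)
		a, errA := countRows(up, table)
		b, errB := countRows(down, table)
		if errA != nil || errB != nil {
			log.Printf("%s: query error upstream=%v downstream=%v", table, errA, errB)
			continue
		}
		if a != b {
			fmt.Printf("%s: upstream=%d downstream=%d (not yet equal)\n", table, a, b)
		} else {
			fmt.Printf("%s: %d rows on both sides\n", table, a)
		}
	}
}
```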
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table structure of `processor_err_chan`.`t1` through `t10` ... equivalent
Comparing the table data of `processor_err_chan`.`t1` through `t10` ... failure
The data of `processor_err_chan`.`t1` through `t10` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
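Every completed pass shown above ends with the same summary line ("... 0 tables finished, 10 tables failed, 0 tables skipped"), which is the natural thing for a retry wrapper to key off. Assuming the same summary line also appears in sync_diff.log (this excerpt does not prove that), a minimal way to pull the latest counts out of it could look like the sketch below; the log path is the one printed above.

```go
package main

import (
	"fmt"
	"os"
	"regexp"
)

// summaryRE matches the final summary line that sync_diff prints, e.g.
// "A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped."
var summaryRE = regexp.MustCompile(
	`A total of (\d+) tables have been compared, (\d+) tables finished, (\d+) tables failed, (\d+) tables skipped`)

func main() {
	// Path taken from the log above; adjust if the output directory differs.
	data, err := os.ReadFile("/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log")
	if err != nil {
		fmt.Println("cannot read sync_diff.log:", err)
		return
	}
	matches := summaryRE.FindAllStringSubmatch(string(data), -1)
	if len(matches) == 0 {
		fmt.Println("no summary line found yet")
		return
	}
	last := matches[len(matches)-1]
	fmt.Printf("latest run: compared=%s finished=%s failed=%s skipped=%s\n",
		last[1], last[2], last[3], last[4])
}
```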
_____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/3 Comparing the table structure of ``processor_err_chan`.`t9`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/4 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/5 Comparing the table structure of ``processor_err_chan`.`t4`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t7`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... 
Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/8 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... 
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t2` is not equal
The data of `processor_err_chan`.`t5` is not equal
The data of `processor_err_chan`.`t4` is not equal
The data of `processor_err_chan`.`t7` is not equal
The data of `processor_err_chan`.`t6` is not equal
The data of `processor_err_chan`.`t3` is not equal
The data of `processor_err_chan`.`t1` is not equal
The data of `processor_err_chan`.`t10` is not equal
The data of `processor_err_chan`.`t9` is not equal
The data of `processor_err_chan`.`t8` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
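The same ten-table comparison keeps restarting above because the integration test re-runs sync_diff_inspector until the upstream and downstream data converge or a retry budget runs out. A minimal sketch of such a retry wrapper follows; it is an illustration only, not the repo's actual helper. The paths are copied from this log, while the retry count and config file location are assumptions, and it presumes sync_diff_inspector exits non-zero when the check fails:

#!/usr/bin/env bash
# Re-run sync_diff_inspector until the data matches or the retry budget is spent.
set -u
WORK_DIR=/tmp/tidb_cdc_test/processor_err_chan           # path as printed in this log
DIFF_CONFIG=$WORK_DIR/sync_diff/conf/diff_config.toml    # assumed config location
CHECK_RETRIES=${CHECK_RETRIES:-10}                       # assumed retry budget

for i in $(seq 1 "$CHECK_RETRIES"); do
    # Each attempt prints the progress/summary blocks seen in this console output.
    if sync_diff_inspector --config="$DIFF_CONFIG"; then
        echo "data is consistent after $i attempt(s)"
        exit 0
    fi
    echo "attempt $i: tables still not equal, retrying in 3s ..."
    sleep 3
done

echo "sync diff still failing after $CHECK_RETRIES attempts, see $WORK_DIR/sync_diff/output/sync_diff.log"
exit 1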
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t10` is not equal
The data of `processor_err_chan`.`t6` is not equal
The data of `processor_err_chan`.`t2` is not equal
The data of `processor_err_chan`.`t1` is not equal
The data of `processor_err_chan`.`t5` is not equal
The data of `processor_err_chan`.`t4` is not equal
The data of `processor_err_chan`.`t3` is not equal
The data of `processor_err_chan`.`t9` is not equal
The data of `processor_err_chan`.`t8` is not equal
The data of `processor_err_chan`.`t7` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t9` is not equal
The data of `processor_err_chan`.`t8` is not equal
The data of `processor_err_chan`.`t7` is not equal
The data of `processor_err_chan`.`t3` is not equal
The data of `processor_err_chan`.`t10` is not equal
The data of `processor_err_chan`.`t6` is not equal
The data of `processor_err_chan`.`t5` is not equal
The data of `processor_err_chan`.`t4` is not equal
The data of `processor_err_chan`.`t2` is not equal
The data of `processor_err_chan`.`t1` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
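Every failed attempt points at the same two artifacts: the patch SQL under fix-on-tidb0/ and the detailed diff log. When triaging a failure like this by hand, a few read-only commands are usually enough to see what actually differs; the paths are taken from the log, while the *.sql glob and the grep patterns are assumptions about how the output is laid out:

OUTPUT_DIR=/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output

# The patch directory holds the statements sync_diff would apply to make the downstream match.
ls -l "$OUTPUT_DIR/fix-on-tidb0/"
head -n 20 "$OUTPUT_DIR"/fix-on-tidb0/*.sql

# The detailed log records which tables/chunks were compared and why they were judged unequal.
grep -iE 'not equal|failure|error' "$OUTPUT_DIR/sync_diff.log" | tail -n 50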
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t9` is not equal
The data of `processor_err_chan`.`t7` is not equal
The data of `processor_err_chan`.`t6` is not equal
The data of `processor_err_chan`.`t5` is not equal
The data of `processor_err_chan`.`t3` is not equal
The data of `processor_err_chan`.`t1` is not equal
The data of `processor_err_chan`.`t10` is not equal
The data of `processor_err_chan`.`t8` is not equal
The data of `processor_err_chan`.`t4` is not equal
The data of `processor_err_chan`.`t2` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
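When every single table reports "not equal", the usual root cause is that the changefeed never advanced, so the downstream tables are simply empty rather than subtly different. A quick row-count comparison makes that obvious; the sketch below assumes the customary ports of these integration tests (upstream TiDB on 4000, downstream TiDB on 3306) and a passwordless root user, so adjust to your environment:

# Compare row counts for processor_err_chan.t1 .. t10 on both clusters.
for t in $(seq 1 10); do
    up=$(mysql -h 127.0.0.1 -P 4000 -u root -Nse "SELECT COUNT(*) FROM processor_err_chan.t$t")
    down=$(mysql -h 127.0.0.1 -P 3306 -u root -Nse "SELECT COUNT(*) FROM processor_err_chan.t$t")
    echo "t$t: upstream=$up downstream=$down"
done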
Progress [============================================================>] 100% 0/0
The data of `processor_err_chan`.`t7` is not equal
The data of `processor_err_chan`.`t3` is not equal
The data of `processor_err_chan`.`t10` is not equal
The data of `processor_err_chan`.`t9` is not equal
The data of `processor_err_chan`.`t6` is not equal
The data of `processor_err_chan`.`t5` is not equal
The data of `processor_err_chan`.`t4` is not equal
The data of `processor_err_chan`.`t2` is not equal
The data of `processor_err_chan`.`t1` is not equal
The data of `processor_err_chan`.`t8` is not equal
The rest of tables are all equal.
A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log'
A total of 10 tables need to be compared
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/6 Comparing the table structure of ``processor_err_chan`.`t2`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/7 Comparing the table structure of ``processor_err_chan`.`t3`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/8 Comparing the table structure of ``processor_err_chan`.`t10`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/9 Comparing the table structure of ``processor_err_chan`.`t1`` ... equivalent Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t8`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/10 Comparing the table data of ``processor_err_chan`.`t8`` ... failure Comparing the table data of ``processor_err_chan`.`t9`` ... Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... 
Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [======>------------------------------------------------------] 10% 0/9 Comparing the table data of ``processor_err_chan`.`t9`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t6`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [============>------------------------------------------------] 20% 0/8 Comparing the table data of ``processor_err_chan`.`t6`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t5`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==================>------------------------------------------] 30% 0/7 Comparing the table data of ``processor_err_chan`.`t5`` ... failure Comparing the table data of ``processor_err_chan`.`t7`` ... Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [========================>------------------------------------] 40% 0/6 Comparing the table data of ``processor_err_chan`.`t7`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t3`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/5 Comparing the table data of ``processor_err_chan`.`t3`` ... failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t2`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [====================================>------------------------] 60% 0/4 Comparing the table data of ``processor_err_chan`.`t2`` ... 
failure Comparing the table data of ``processor_err_chan`.`t4`` ... Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [==========================================>------------------] 70% 0/3 Comparing the table data of ``processor_err_chan`.`t4`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... Comparing the table data of ``processor_err_chan`.`t10`` ... _____________________________________________________________________________ Progress [================================================>------------] 80% 0/2 Comparing the table data of ``processor_err_chan`.`t10`` ... failure Comparing the table data of ``processor_err_chan`.`t1`` ... _____________________________________________________________________________ Progress [======================================================>------] 90% 0/1 Comparing the table data of ``processor_err_chan`.`t1`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `processor_err_chan`.`t2` is not equal The data of `processor_err_chan`.`t1` is not equal The data of `processor_err_chan`.`t10` is not equal The data of `processor_err_chan`.`t4` is not equal The data of `processor_err_chan`.`t3` is not equal The data of `processor_err_chan`.`t9` is not equal The data of `processor_err_chan`.`t8` is not equal The data of `processor_err_chan`.`t7` is not equal The data of `processor_err_chan`.`t6` is not equal The data of `processor_err_chan`.`t5` is not equal The rest of tables are all equal. A total of 10 tables have been compared, 0 tables finished, 10 tables failed, 0 tables skipped. 
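Each "failure" above comes from sync_diff_inspector comparing a per-chunk row count and CRC32/BIT_XOR checksum between the upstream TiDB (port 4000) and the downstream TiDB (port 3306), as the "count and checksum" and "checksum failed" DEBUG records further down show. Below is a minimal Go sketch of that probe, not the sync_diff_inspector implementation: the DSNs are assumptions taken from the logged config (user root, empty password), and the table list is a hypothetical subset of the ten tables in this test.

// checksum_probe.go: reissue the same shape of "count and checksum" query that
// the DEBUG records below log for each chunk, against both clusters, and compare.
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/go-sql-driver/mysql"
)

// countAndChecksum mirrors the logged query:
//   SELECT COUNT(*) as CNT,
//          BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`)))) AS UNSIGNED)) as CHECKSUM ...
func countAndChecksum(db *sql.DB, table string) (cnt int64, checksum uint64, err error) {
	q := fmt.Sprintf("SELECT COUNT(*) as CNT, "+
		"BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`)))) AS UNSIGNED)) as CHECKSUM "+
		"FROM `processor_err_chan`.`%s` WHERE ((TRUE) AND (TRUE))", table)
	err = db.QueryRow(q).Scan(&cnt, &checksum)
	return
}

func main() {
	// Assumed DSNs, taken from the data-sources section of the logged config.
	up, err := sql.Open("mysql", "root@tcp(127.0.0.1:4000)/")
	if err != nil {
		log.Fatal(err)
	}
	down, err := sql.Open("mysql", "root@tcp(127.0.0.1:3306)/")
	if err != nil {
		log.Fatal(err)
	}
	for _, t := range []string{"t1", "t2", "t10"} {
		uc, ux, err := countAndChecksum(up, t)
		if err != nil {
			log.Fatal(err)
		}
		dc, dx, err := countAndChecksum(down, t)
		if err != nil {
			log.Fatal(err)
		}
		// A mismatch here is what the log records as "checksum failed" and what
		// the summary above reports as "The data of `processor_err_chan`.`<t>` is not equal".
		fmt.Printf("%s: upstream=(%d, %d) downstream=(%d, %d) equal=%v\n",
			t, uc, ux, dc, dx, uc == dc && ux == dx)
	}
}

In this run the downstream side returns (0, 0) for every table ("downstream chunk size"=0, "downstream checksum"=0 in the DEBUG records below), so every chunk comparison fails and sync_diff falls back to row-level comparison, emitting the REPLACE INTO fix SQL seen later in the log.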
The patch file has been generated in '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/sync_diff.log' [2024/05/06 14:37:30.713 +08:00] [INFO] [printer.go:46] ["Welcome to sync_diff_inspector"] ["Release Version"=v7.4.0] ["Git Commit Hash"=d671b0840063bc2532941f02e02e12627402844c] ["Git Branch"=heads/refs/tags/v7.4.0] ["UTC Build Time"="2023-09-22 03:51:56"] ["Go Version"=go1.21.1] [2024/05/06 14:37:30.714 +08:00] [INFO] [main.go:101] [config="{\"check-thread-count\":4,\"split-thread-count\":5,\"export-fix-sql\":true,\"check-struct-only\":false,\"dm-addr\":\"\",\"dm-task\":\"\",\"data-sources\":{\"mysql1\":{\"host\":\"127.0.0.1\",\"port\":4000,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null},\"tidb0\":{\"host\":\"127.0.0.1\",\"port\":3306,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null}},\"routes\":null,\"table-configs\":null,\"task\":{\"source-instances\":[\"mysql1\"],\"source-routes\":null,\"target-instance\":\"tidb0\",\"target-check-tables\":[\"processor_err_chan.?*\"],\"target-configs\":null,\"output-dir\":\"/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output\",\"SourceInstances\":[{\"host\":\"127.0.0.1\",\"port\":4000,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null}],\"TargetInstance\":{\"host\":\"127.0.0.1\",\"port\":3306,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null},\"TargetTableConfigs\":null,\"TargetCheckTables\":[{}],\"FixDir\":\"/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/fix-on-tidb0\",\"CheckpointDir\":\"/tmp/tidb_cdc_test/processor_err_chan/sync_diff/output/checkpoint\",\"HashFile\":\"\"},\"ConfigFile\":\"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/processor_err_chan/conf/diff_config.toml\",\"PrintVersion\":false}"] [2024/05/06 14:37:30.714 +08:00] [DEBUG] [diff.go:842] ["set tidb cfg"] [2024/05/06 14:37:30.718 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `processor_err_chan` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.718 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `test` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.719 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t1`] [2024/05/06 14:37:30.721 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t10`] [2024/05/06 14:37:30.722 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t2`] [2024/05/06 14:37:30.724 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t3`] [2024/05/06 14:37:30.725 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t4`] [2024/05/06 14:37:30.727 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t5`] [2024/05/06 14:37:30.728 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t6`] [2024/05/06 14:37:30.729 +08:00] [DEBUG] [source.go:326] ["match target table"] 
[table=`processor_err_chan`.`t7`] [2024/05/06 14:37:30.730 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t8`] [2024/05/06 14:37:30.732 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`processor_err_chan`.`t9`] [2024/05/06 14:37:30.733 +08:00] [INFO] [tidb.go:209] ["find router for tidb source"] [2024/05/06 14:37:30.733 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `processor_err_chan` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.734 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `test` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.734 +08:00] [INFO] [source.go:412] ["table match check finished"] [2024/05/06 14:37:30.735 +08:00] [INFO] [tidb.go:209] ["find router for tidb source"] [2024/05/06 14:37:30.735 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `processor_err_chan` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.735 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `test` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:37:30.735 +08:00] [INFO] [source.go:412] ["table match check finished"] [2024/05/06 14:37:30.736 +08:00] [INFO] [diff.go:358] ["The upstream is TiDB. pick it as work source candidate"] [2024/05/06 14:37:30.744 +08:00] [INFO] [pd_service_discovery.go:628] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/05/06 14:37:30.744 +08:00] [INFO] [pd_service_discovery.go:195] ["[pd] init cluster id"] [cluster-id=7365771709734059805] [2024/05/06 14:37:30.744 +08:00] [INFO] [client.go:607] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/06 14:37:30.744 +08:00] [INFO] [tso_client.go:230] ["[tso] switch dc tso global allocator serving address"] [dc-location=global] [new-address=http://127.0.0.1:2379] [2024/05/06 14:37:30.745 +08:00] [INFO] [tso_dispatcher.go:313] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/06 14:37:30.745 +08:00] [INFO] [client.go:655] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/06 14:37:30.745 +08:00] [INFO] [pd.go:212] ["tidb support auto gc safepoint"] [version=8.2.0-alpha-80-g06ee59bd9c] [2024/05/06 14:37:30.746 +08:00] [INFO] [diff.go:349] ["start update service to keep GC stopped automatically"] [2024/05/06 14:37:30.746 +08:00] [INFO] [pd.go:227] ["generate dumpling gc safePoint id"] [id=Sync_diff_1714977450746117010] [2024/05/06 14:37:30.746 +08:00] [DEBUG] [pd.go:229] ["update PD safePoint limit with ttl"] [safePoint=449571048835252237] [updateInterval=2m30s] [2024/05/06 14:37:30.746 +08:00] [INFO] [diff.go:363] ["The downstream is TiDB. 
pick it as work source first"] [2024/05/06 14:37:30.752 +08:00] [INFO] [pd_service_discovery.go:628] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2479] [old-leader=] [2024/05/06 14:37:30.752 +08:00] [INFO] [pd_service_discovery.go:195] ["[pd] init cluster id"] [cluster-id=7365771716407795605] [2024/05/06 14:37:30.752 +08:00] [INFO] [client.go:607] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/06 14:37:30.752 +08:00] [INFO] [tso_client.go:230] ["[tso] switch dc tso global allocator serving address"] [dc-location=global] [new-address=http://127.0.0.1:2479] [2024/05/06 14:37:30.753 +08:00] [INFO] [tso_dispatcher.go:313] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/06 14:37:30.753 +08:00] [INFO] [client.go:655] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/06 14:37:30.754 +08:00] [INFO] [pd.go:212] ["tidb support auto gc safepoint"] [version=8.2.0-alpha-80-g06ee59bd9c] [2024/05/06 14:37:30.754 +08:00] [INFO] [diff.go:349] ["start update service to keep GC stopped automatically"] [2024/05/06 14:37:30.754 +08:00] [INFO] [pd.go:227] ["generate dumpling gc safePoint id"] [id=Sync_diff_1714977450754168601] [2024/05/06 14:37:30.754 +08:00] [INFO] [diff.go:191] ["not found checkpoint file, start from beginning"] [2024/05/06 14:37:30.754 +08:00] [DEBUG] [pd.go:229] ["update PD safePoint limit with ttl"] [safePoint=449571048846786565] [updateInterval=2m30s] [2024/05/06 14:37:30.770 +08:00] [INFO] [diff.go:721] ["start writeSQLs goroutine"] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool="chunks producer"] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t8] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t7] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t5] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t6] [2024/05/06 14:37:30.770 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? 
AND table_name= ?;"] [schema=processor_err_chan] [table=t9] [2024/05/06 14:37:30.770 +08:00] [INFO] [diff.go:377] ["start handleCheckpoint goroutine"] [2024/05/06 14:37:30.771 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.771 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t8` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.772 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not 
found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.772 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t5` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.772 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.772 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t6` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.773 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] 
[db=processor_err_chan] [table=t8] [2024/05/06 14:37:30.773 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool="chunks producer"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t4] [2024/05/06 14:37:30.773 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":1,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.773 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t9` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t8` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [utils.go:766] ["table columns"] 
[columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t8` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.773 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t7` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.773 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not 
found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.773 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t4` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t5] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t6] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool="chunks producer"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool="chunks producer"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t3] [2024/05/06 14:37:30.774 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":4,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? 
AND table_name= ?;"] [schema=processor_err_chan] [table=t2] [2024/05/06 14:37:30.774 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":3,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t9] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool="chunks producer"] [2024/05/06 14:37:30.774 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":0,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? AND table_name= ?;"] [schema=processor_err_chan] [table=t1] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t5` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t6` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS 
UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t9` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t9` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t6` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t5` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t7] [2024/05/06 14:37:30.774 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [common.go:525] [GetBucketsInfo] [sql="SHOW STATS_BUCKETS WHERE db_name= ? 
AND table_name= ?;"] [schema=processor_err_chan] [table=t10] [2024/05/06 14:37:30.774 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":2,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.774 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.775 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.775 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t3` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.775 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t4] [2024/05/06 14:37:30.775 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.775 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not 
found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.775 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t2` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.775 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] [error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.775 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t10` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.775 +08:00] [INFO] [tidb.go:58] ["failed to build bucket iterator, fall back to use random iterator"] 
[error="primary key on id in buckets info not found"] [errorVerbose="primary key on id in buckets info not found\ngithub.com/pingcap/errors.NotFoundf\n\t/go/pkg/mod/github.com/pingcap/errors@v0.11.5-0.20221009092201-b66cddb77c32/juju_adaptor.go:117\ngithub.com/pingcap/tidb-tools/pkg/dbutil.GetBucketsInfo\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/pkg/dbutil/common.go:576\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.(*BucketIterator).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:139\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/splitter.NewBucketIteratorWithCheckpoint\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/splitter/bucket.go:80\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*TiDBTableAnalyzer).AnalyzeSplitter\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:54\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.(*ChunksIterator).produceChunks.func3\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/chunks_iter.go:133\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/utils.(*WorkerPool).Apply.func1\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/utils/utils.go:94\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650"] [2024/05/06 14:37:30.775 +08:00] [DEBUG] [common.go:237] ["get row count"] [sql="SELECT COUNT(1) cnt FROM `processor_err_chan`.`t1` WHERE TRUE"] [args=null] [2024/05/06 14:37:30.776 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t2] [2024/05/06 14:37:30.776 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":1,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t8] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.776 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.776 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t8` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.776 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t3] [2024/05/06 14:37:30.776 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.777 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t10] [2024/05/06 14:37:30.777 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.777 +08:00] [INFO] [random.go:110] ["get chunk size for table"] ["chunk size"=50000] [db=processor_err_chan] [table=t1] [2024/05/06 14:37:30.777 +08:00] [INFO] [random.go:116] ["split range by random"] ["row count"=0] ["split chunk num"=0] [2024/05/06 14:37:30.777 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":3,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t6] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream 
checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.777 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t6` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.778 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":0,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t9] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.778 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":4,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t5] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.778 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t5` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.778 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t9` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.778 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t8` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t6` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t9` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t5` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (1);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (2);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (3);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (4);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (5);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (6);"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t8`(`id`) VALUES (7);"] [2024/05/06 14:37:30.779 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":5,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [utils.go:766] ["table columns"] 
[columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t7` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":1,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t7` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.779 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (1);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (2);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (3);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (4);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (5);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (6);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t9`(`id`) VALUES (7);"] [2024/05/06 14:37:30.780 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":7,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (1);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (2);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (3);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] 
["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (4);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (5);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (1);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (6);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":0,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t6`(`id`) VALUES (7);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t4` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.780 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":6,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (2);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (3);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (4);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (5);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] 
[columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (6);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t2` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t5`(`id`) VALUES (7);"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t4` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.780 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":9,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":3,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t2` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t3` WHERE ((TRUE) AND (TRUE));"] 
[args=null] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t3` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.780 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":4,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":2,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t7] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t7` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":6,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t3] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t3` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":7,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t2] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t2` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":5,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t4] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t4` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.782 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t7` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t3` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 
14:37:30.783 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t2` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t4` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (1);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (2);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (3);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (4);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (5);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (6);"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t7`(`id`) VALUES (7);"] [2024/05/06 14:37:30.783 +08:00] [INFO] [diff.go:280] ["global consume chunk info"] ["chunk index"="{\"table-index\":8,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] ["chunk bound"="[]"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [utils.go:104] ["wait for workers"] [pool=consumer] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t10` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t10` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.783 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk 
index"="{\"table-index\":2,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (1);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (2);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (3);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (4);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (5);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (6);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t3`(`id`) VALUES (7);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (1);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (2);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (3);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t1` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (4);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (5);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (6);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t2`(`id`) VALUES (7);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [utils.go:766] ["table columns"] [columns="[{\"id\":1,\"name\":{\"O\":\"id\",\"L\":\"id\"},\"offset\":0,\"origin_default\":null,\"origin_default_bit\":null,\"default\":null,\"default_bit\":null,\"default_is_expr\":false,\"generated_expr_string\":\"\",\"generated_stored\":false,\"dependences\":null,\"type\":{\"Tp\":3,\"Flag\":515,\"Flen\":11,\"Decimal\":0,\"Charset\":\"binary\",\"Collate\":\"binary\",\"Elems\":null,\"ElemsIsBinaryLit\":null,\"Array\":false},\"state\":5,\"comment\":\"\",\"hidden\":false,\"change_state_info\":null,\"version\":2}]"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [utils.go:785] ["count and checksum"] [sql="SELECT COUNT(*) as CNT, 
BIT_XOR(CAST(CRC32(CONCAT_WS(',', `id`, CONCAT(ISNULL(`id`))))AS UNSIGNED)) as CHECKSUM FROM `processor_err_chan`.`t1` WHERE ((TRUE) AND (TRUE));"] [args=null] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":6,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":7,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (1);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (2);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (3);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (4);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (5);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (6);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t4`(`id`) VALUES (7);"] [2024/05/06 14:37:30.784 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":5,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.785 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":9,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t10] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.785 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t10` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.785 +08:00] [DEBUG] [diff.go:604] ["checksum failed"] ["chunk id"="{\"table-index\":8,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [table=t1] ["upstream chunk size"=7] ["downstream chunk size"=0] ["upstream checksum"=3301074816] ["downstream checksum"=0] [2024/05/06 14:37:30.785 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t1` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.786 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t10` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.786 +08:00] [DEBUG] [tidb.go:189] ["select data"] [sql="SELECT /*!40001 SQL_NO_CACHE */ `id` FROM `processor_err_chan`.`t1` WHERE ((TRUE) AND (TRUE)) ORDER BY `id`"] [args=null] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (1);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (2);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (3);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] 
[diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (4);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (5);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (6);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t10`(`id`) VALUES (7);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":9,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (1);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (2);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (3);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (4);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (5);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (6);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:663] ["[insert]"] [sql="REPLACE INTO `processor_err_chan`.`t1`(`id`) VALUES (7);"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:263] ["all consume tasks finished"] [2024/05/06 14:37:30.787 +08:00] [DEBUG] [diff.go:762] ["insert node"] ["chunk index"="{\"table-index\":8,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0}"] [2024/05/06 14:37:30.787 +08:00] [INFO] [diff.go:732] ["write sql channel closed"] [2024/05/06 14:37:30.787 +08:00] [INFO] [diff.go:723] ["close writeSQLs goroutine"] [2024/05/06 14:37:30.787 +08:00] [INFO] [diff.go:405] ["Stop do checkpoint"] [2024/05/06 14:37:30.787 +08:00] [INFO] [checkpoints.go:225] ["save checkpoint"] [chunk="{\"state\":\"failed\",\"chunk-range\":{\"index\":{\"table-index\":9,\"bucket-index-left\":0,\"bucket-index-right\":0,\"chunk-index\":0,\"chunk-count\":0},\"type\":2,\"bounds\":[],\"is-first\":false,\"is-last\":false,\"where\":\"((TRUE) AND (TRUE))\",\"args\":null},\"index-id\":0}"] [state=failed] [2024/05/06 14:37:30.787 +08:00] [INFO] [diff.go:379] ["close handleCheckpoint goroutine"] [2024/05/06 14:37:30.789 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t2`] [] [2024/05/06 14:37:30.790 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t1`] [] [2024/05/06 14:37:30.791 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t10`] [] [2024/05/06 14:37:30.792 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t5`] [] [2024/05/06 14:37:30.792 +08:00] [WARN] [report.go:164] ["fail to get the 
correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t4`] [] [2024/05/06 14:37:30.793 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t3`] [] [2024/05/06 14:37:30.794 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t9`] [] [2024/05/06 14:37:30.795 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t8`] [] [2024/05/06 14:37:30.796 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t7`] [] [2024/05/06 14:37:30.796 +08:00] [WARN] [report.go:164] ["fail to get the correct size of table, if you want to get the correct size, please analyze the corresponding tables"] [table=`processor_err_chan`.`t6`] [] [2024/05/06 14:37:30.798 +08:00] [INFO] [main.go:114] ["check data finished"] [cost=83.660285ms] [2024/05/06 14:37:30.798 +08:00] [WARN] [main.go:105] ["check failed!!!"] check diff failed 16-th time, retry later table consistent_replicate_ddl.usertable3 not exists for 89-th check, retry later table test.finish_mark not exists for 27-th check, retry later check diff successfully table sink_retry.finish_mark_2 not exists for 5-th check, retry later check diff failed 1-th time, retry later table foreign_key.finish_mark not exists for 32-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 90-th check, retry later table test.finish_mark not exists for 28-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 13-th time, retry later check diff failed 17-th time, retry later table sink_retry.finish_mark_2 not exists for 6-th check, retry later table foreign_key.finish_mark not exists for 33-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 91-th check, retry later table test.finish_mark not exists for 29-th check, retry later check diff failed 2-th time, retry later check diff failed 18-th time, retry later table sink_retry.finish_mark_2 not exists for 7-th check, retry later table foreign_key.finish_mark not exists for 34-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 92-th check, retry later table test.finish_mark not exists for 30-th check, retry later check diff failed 3-th time, retry later check diff failed 19-th time, retry later table sink_retry.finish_mark_2 not exists for 8-th check, retry later table foreign_key.finish_mark not exists for 35-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 93-th check, retry later table test.finish_mark not exists for 31-th check, retry later check diff failed 4-th time, retry later table sink_retry.finish_mark_2 not exists for 9-th check, retry later check diff failed 20-th time, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 28.90 secs (129068919 bytes/sec) [Pipeline] { [Pipeline] cache table foreign_key.finish_mark not exists for 36-th check, retry later 
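A minimal sketch of checking one of these chunks by hand, assuming the mysql client is available and that root has an empty password on these throwaway test clusters; the 127.0.0.1:4000 (upstream) and 127.0.0.1:3306 (downstream) endpoints follow the sync_diff data-source config printed later in this log and are an assumption for the processor_err_chan run shown above. The query is the same "count and checksum" statement the tool logs:

SQL="SELECT COUNT(*) AS CNT, BIT_XOR(CAST(CRC32(CONCAT_WS(',', \`id\`, CONCAT(ISNULL(\`id\`)))) AS UNSIGNED)) AS CHECKSUM FROM \`processor_err_chan\`.\`t7\` WHERE ((TRUE) AND (TRUE));"
mysql -h 127.0.0.1 -P 4000 -u root -e "$SQL"   # upstream TiDB
mysql -h 127.0.0.1 -P 3306 -u root -e "$SQL"   # downstream TiDB
# Equal CNT/CHECKSUM pairs on both sides mean the chunk is consistent; a mismatch is what
# produces the "checksum failed" records and the REPLACE INTO fix SQL seen above.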
table consistent_replicate_ddl.usertable3 not exists for 94-th check, retry later table sink_retry.finish_mark_2 not exists for 10-th check, retry later table test.finish_mark not exists for 32-th check, retry later check diff failed 5-th time, retry later check diff failed 21-th time, retry later table foreign_key.finish_mark not exists for 37-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 95-th check, retry later table test.finish_mark not exists for 33-th check, retry later check diff successfully [2024/05/06 14:37:45.118 +08:00] [ERROR] [request.go:310] ["failed to send a http request"] [error="Get \"http://127.0.0.1:8300/api/v2/status\": dial tcp 127.0.0.1:8300: connect: connection refused"] Error: Get "http://127.0.0.1:8300/api/v2/status": dial tcp 127.0.0.1:8300: connect: connection refused check diff failed 22-th time, retry later table sink_retry.finish_mark_2 not exists for 11-th check, retry later table foreign_key.finish_mark not exists for 38-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 96-th check, retry later table test.finish_mark not exists for 34-th check, retry later check diff failed 23-th time, retry later table sink_retry.finish_mark_2 not exists for 12-th check, retry later table foreign_key.finish_mark not exists for 39-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 97-th check, retry later table test.finish_mark not exists for 35-th check, retry later table sink_retry.finish_mark_2 not exists for 13-th check, retry later check diff failed 24-th time, retry later table foreign_key.finish_mark not exists for 40-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 98-th check, retry later table test.finish_mark not exists for 36-th check, retry later table sink_retry.finish_mark_2 not exists for 14-th check, retry later check diff failed 25-th time, retry later table foreign_key.finish_mark not exists for 41-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 99-th check, retry later table test.finish_mark not exists for 37-th check, retry later table sink_retry.finish_mark_2 not exists for 15-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 8.27 secs (450893004 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 26-th time, retry later table test.finish_mark not exists for 38-th check, retry later table foreign_key.finish_mark not exists for 42-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 100-th check, retry later table sink_retry.finish_mark_2 not exists for 16-th check, retry later check diff failed 27-th time, retry later table foreign_key.finish_mark not exists for 43-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 101-th check, retry later table test.finish_mark not exists for 39-th check, retry later table sink_retry.finish_mark_2 not exists for 17-th check, retry later check diff failed 28-th time, retry later table foreign_key.finish_mark not exists for 44-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 102-th check, retry later table test.finish_mark not exists for 40-th check, retry later table sink_retry.finish_mark_2 not exists for 18-th check, retry later check diff failed 29-th time, retry later table foreign_key.finish_mark not exists for 45-th check, retry later table consistent_replicate_ddl.usertable3 not 
exists for 103-th check, retry later table test.finish_mark not exists for 41-th check, retry later table sink_retry.finish_mark_2 not exists for 19-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 14-th time, retry later check diff failed 30-th time, retry later table foreign_key.finish_mark not exists for 46-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 104-th check, retry later table test.finish_mark not exists for 42-th check, retry later table sink_retry.finish_mark_2 not exists for 20-th check, retry later check diff failed at last There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something 
error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log There is something error when initialize diff, please check log info in /tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/sync_diff.log [2024/05/06 14:38:01.930 +08:00] [INFO] [printer.go:46] ["Welcome to sync_diff_inspector"] ["Release Version"=v7.4.0] ["Git Commit Hash"=d671b0840063bc2532941f02e02e12627402844c] ["Git Branch"=heads/refs/tags/v7.4.0] ["UTC Build Time"="2023-09-22 03:51:56"] ["Go Version"=go1.21.1] [2024/05/06 14:38:01.931 +08:00] [INFO] [main.go:101] 
[config="{\"check-thread-count\":4,\"split-thread-count\":5,\"export-fix-sql\":true,\"check-struct-only\":false,\"dm-addr\":\"\",\"dm-task\":\"\",\"data-sources\":{\"mysql1\":{\"host\":\"127.0.0.1\",\"port\":4000,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null},\"tidb0\":{\"host\":\"127.0.0.1\",\"port\":3306,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null}},\"routes\":null,\"table-configs\":null,\"task\":{\"source-instances\":[\"mysql1\"],\"source-routes\":null,\"target-instance\":\"tidb0\",\"target-check-tables\":[\"kv_client_stream_reconnect.?*\"],\"target-configs\":null,\"output-dir\":\"/tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output\",\"SourceInstances\":[{\"host\":\"127.0.0.1\",\"port\":4000,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null}],\"TargetInstance\":{\"host\":\"127.0.0.1\",\"port\":3306,\"user\":\"root\",\"password\":\"******\",\"sql-mode\":\"\",\"snapshot\":\"\",\"security\":null,\"route-rules\":null,\"Router\":{\"Selector\":{}},\"Conn\":null},\"TargetTableConfigs\":null,\"TargetCheckTables\":[{}],\"FixDir\":\"/tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/fix-on-tidb0\",\"CheckpointDir\":\"/tmp/tidb_cdc_test/kv_client_stream_reconnect/sync_diff/output/checkpoint\",\"HashFile\":\"\"},\"ConfigFile\":\"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/conf/diff_config.toml\",\"PrintVersion\":false}"] [2024/05/06 14:38:01.931 +08:00] [DEBUG] [diff.go:842] ["set tidb cfg"] [2024/05/06 14:38:01.934 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `kv_client_stream_reconnect` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:38:01.935 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `test` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:38:01.935 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`kv_client_stream_reconnect`.`t1`] [2024/05/06 14:38:01.937 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`kv_client_stream_reconnect`.`t2`] [2024/05/06 14:38:01.938 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`kv_client_stream_reconnect`.`t3`] [2024/05/06 14:38:01.940 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`kv_client_stream_reconnect`.`t4`] [2024/05/06 14:38:01.941 +08:00] [DEBUG] [source.go:326] ["match target table"] [table=`kv_client_stream_reconnect`.`t5`] [2024/05/06 14:38:01.943 +08:00] [INFO] [tidb.go:209] ["find router for tidb source"] [2024/05/06 14:38:01.943 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `kv_client_stream_reconnect` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:38:01.943 +08:00] [DEBUG] [common.go:386] ["query tables"] [query="SHOW FULL TABLES IN `test` WHERE Table_Type = 'BASE TABLE';"] [2024/05/06 14:38:01.943 +08:00] [FATAL] [main.go:120] ["failed to initialize diff process"] [error="from upstream: please make sure the filter is correct.: the target has no table to be compared. source-table is ``kv_client_stream_reconnect`.`t10``"] [errorVerbose="the target has no table to be compared. 
source-table is ``kv_client_stream_reconnect`.`t10``\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.checkTableMatched\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/source.go:401\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.NewTiDBSource\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/tidb.go:267\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.buildSourceFromCfg\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/source.go:231\ngithub.com/pingcap/tidb-tools/sync_diff_inspector/source.NewSources\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/source/source.go:206\nmain.(*Diff).init\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/diff.go:137\nmain.NewDiff\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/diff.go:95\nmain.checkSyncState\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/main.go:117\nmain.main\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/main.go:104\nruntime.main\n\t/usr/local/go/src/runtime/proc.go:267\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1650\nplease make sure the filter is correct.\nfrom upstream"] [stack="main.checkSyncState\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/main.go:120\nmain.main\n\t/home/jenkins/agent/workspace/build-common/go/src/github.com/pingcap/tidb-tools/sync_diff_inspector/main.go:104\nruntime.main\n\t/usr/local/go/src/runtime/proc.go:267"] table foreign_key.finish_mark not exists for 47-th check, retry later table sink_retry.finish_mark_2 not exists for 21-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 105-th check, retry later table test.finish_mark not exists for 43-th check, retry later table foreign_key.finish_mark not exists for 48-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 106-th check, retry later table test.finish_mark not exists for 44-th check, retry later table sink_retry.finish_mark_2 not exists for 22-th check, retry later table foreign_key.finish_mark not exists for 49-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 107-th check, retry later table test.finish_mark not exists for 45-th check, retry later table sink_retry.finish_mark_2 not exists for 23-th check, retry later table foreign_key.finish_mark not exists for 50-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 108-th check, retry later table test.finish_mark not exists for 46-th check, retry later table sink_retry.finish_mark_2 not exists for 24-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 109-th check, retry later table test.finish_mark not exists for 47-th check, retry later table foreign_key.finish_mark not exists for 51-th check, retry later table sink_retry.finish_mark_2 not exists for 25-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 18.32 secs (203664131 bytes/sec) [Pipeline] { [Pipeline] cache table consistent_replicate_ddl.usertable3 not exists for 110-th check, 
retry later table foreign_key.finish_mark not exists for 52-th check, retry later table sink_retry.finish_mark_2 not exists for 26-th check, retry later table test.finish_mark not exists for 48-th check, retry later table foreign_key.finish_mark not exists for 53-th check, retry later table sink_retry.finish_mark_2 not exists for 27-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 111-th check, retry later table test.finish_mark not exists for 49-th check, retry later table foreign_key.finish_mark not exists for 54-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 112-th check, retry later table sink_retry.finish_mark_2 not exists for 28-th check, retry later table test.finish_mark not exists for 50-th check, retry later table foreign_key.finish_mark not exists for 55-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 113-th check, retry later table sink_retry.finish_mark_2 not exists for 29-th check, retry later table test.finish_mark not exists for 51-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 114-th check, retry later table foreign_key.finish_mark not exists for 56-th check, retry later table sink_retry.finish_mark_2 not exists for 30-th check, retry later table test.finish_mark not exists for 52-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 8.11 secs (459709650 bytes/sec) [Pipeline] { [Pipeline] cache table consistent_replicate_ddl.usertable3 not exists for 115-th check, retry later table foreign_key.finish_mark not exists for 57-th check, retry later table sink_retry.finish_mark_2 not exists for 31-th check, retry later table test.finish_mark not exists for 53-th check, retry later table foreign_key.finish_mark not exists for 58-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 116-th check, retry later table sink_retry.finish_mark_2 not exists for 32-th check, retry later table test.finish_mark not exists for 54-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 15-th time, retry later table foreign_key.finish_mark not exists for 59-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 117-th check, retry later table test.finish_mark not exists for 55-th check, retry later table sink_retry.finish_mark_2 not exists for 33-th check, retry later table foreign_key.finish_mark not exists for 60-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 118-th check, retry later table test.finish_mark not exists for 56-th check, retry later table sink_retry.finish_mark_2 not exists for 34-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 119-th check, retry later table foreign_key.finish_mark not exists at last check table test.finish_mark not exists for 57-th check, retry later table sink_retry.finish_mark_2 not exists for 35-th check, retry later table consistent_replicate_ddl.usertable3 not exists for 120-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 8.00 secs (466523149 bytes/sec) [Pipeline] { [Pipeline] cache table sink_retry.finish_mark_2 not exists for 36-th check, retry later table test.finish_mark not exists for 58-th check, retry later table 
consistent_replicate_ddl.usertable3 not exists at last check count(*) 50 table sink_retry.finish_mark_2 not exists for 37-th check, retry later table test.finish_mark not exists for 59-th check, retry later table test.finish_mark not exists for 60-th check, retry later table sink_retry.finish_mark_2 not exists for 38-th check, retry later table test.finish_mark not exists at last check table sink_retry.finish_mark_2 not exists for 39-th check, retry later table sink_retry.finish_mark_2 not exists for 40-th check, retry later table sink_retry.finish_mark_2 not exists for 41-th check, retry later table sink_retry.finish_mark_2 not exists for 42-th check, retry later table sink_retry.finish_mark_2 not exists for 43-th check, retry later table sink_retry.finish_mark_2 not exists for 44-th check, retry later table sink_retry.finish_mark_2 not exists for 45-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 17.92 secs (208154093 bytes/sec) [Pipeline] { table sink_retry.finish_mark_2 not exists for 46-th check, retry later [Pipeline] cache Aborted by Jenkins Admin table sink_retry.finish_mark_2 not exists for 47-th check, retry later table sink_retry.finish_mark_2 not exists for 48-th check, retry later cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 16-th time, retry later table sink_retry.finish_mark_2 not exists for 49-th check, retry later table sink_retry.finish_mark_2 not exists for 50-th check, retry later table sink_retry.finish_mark_2 not exists for 51-th check, retry later table sink_retry.finish_mark_2 not exists for 52-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 10.66 secs (350035403 bytes/sec) [Pipeline] { [Pipeline] cache table sink_retry.finish_mark_2 not exists for 53-th check, retry later Click here to forcibly terminate running steps table sink_retry.finish_mark_2 not exists for 54-th check, retry later table sink_retry.finish_mark_2 not exists for 55-th check, retry later table sink_retry.finish_mark_2 not exists for 56-th check, retry later table sink_retry.finish_mark_2 not exists for 57-th check, retry later table sink_retry.finish_mark_2 not exists for 58-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 8.67 secs (430298034 bytes/sec) [Pipeline] { [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=1959de83-31ee-412b-b976-1da1f36dfc10 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-n74j6 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv pingcap_tiflow_pull_cdc_integration_test_1781-n74j6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv GOPATH=/go 
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G15 Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=428b83d4-5582-4c30-b9f3-c2c2ad89b576 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-6fqqh GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-6fqqh pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
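Each of these workers boots its test group the same way; a minimal sketch of reproducing that step locally, assuming a tiflow checkout at the repository root (the PWD above) with the prebuilt test binaries, which the restored tiflow-cdc cache appears to provide here, already present under tests/integration_tests/../bin:

cd tiflow                                          # repository root, matching PWD above
rm -rf /tmp/tidb_cdc_test && mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh mysql G15   # G15 = new_ci_collation batch_add_table multi_rocks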
[Pipeline] sh table sink_retry.finish_mark_2 not exists for 59-th check, retry later [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G14 Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=be58bdc1-6a3a-4edd-9488-efb4d1ab134a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-s7cvf GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq pingcap_tiflow_pull_cdc_integration_test_1781-s7cvf GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G19 Run cases: changefeed_fast_fail batch_update_to_no_batch changefeed_resume_with_checkpoint_ts PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=04ed0a1b-cdfb-4334-9dda-f74f8ff832e5 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G19 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-rnlk7 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-rnlk7 pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/changefeed_fast_fail/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
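Each shard above resets /tmp/tidb_cdc_test and then calls ./tests/integration_tests/run_group.sh with a sink type and a group name (G14, G19, ...), which prints the "Run cases: ..." list and runs each case's run.sh in turn. A rough bash sketch of that dispatch; the group-to-cases table and the way the sink type reaches each run.sh are inferred from the log output, not taken from the real script.

  # Rough shape of run_group.sh <sink> <group>, reconstructed from the log above.
  sink_type=$1    # e.g. mysql
  group=$2        # e.g. G19
  case $group in
      G19) cases="changefeed_fast_fail batch_update_to_no_batch changefeed_resume_with_checkpoint_ts" ;;
      *)   echo "unknown group $group" >&2; exit 1 ;;
  esac
  echo "Run cases: $cases"
  for c in $cases; do
      bash "tests/integration_tests/$c/run.sh" "$sink_type"
  done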
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G21 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=c6cde71f-2f30-4260-9f20-07ddc84eb3c8 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-krg3n GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f pingcap_tiflow_pull_cdc_integration_test_1781-krg3n GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: mysql... <<================= Run cases: bank kill_owner_with_ddl owner_remove_table_error PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=bbed560c-2423-4353-a108-7700b4cdb05e BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts 
FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G21 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-1tgp6 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-1tgp6 pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/bank/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
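The "table sink_retry.finish_mark_2 not exists for 59-th check, retry later" lines interleaved through this log come from a polling helper that waits for a marker table to appear downstream, giving up with "not exists at last check" once the budget is exhausted. A minimal sketch of that check, with the helper name, connection details and check budget assumed:

  # Sketch of the "table ... not exists for N-th check, retry later" poll; the helper
  # name, credentials and the check budget are assumptions for illustration.
  check_table_exists() {
      local table=$1 host=${2:-127.0.0.1} port=${3:-3306} max_checks=${4:-60}
      for ((i = 1; i <= max_checks; i++)); do
          if mysql -h "$host" -P "$port" -u root -e "DESC $table" >/dev/null 2>&1; then
              return 0
          fi
          echo "table $table not exists for $i-th check, retry later"
          sleep 2
      done
      echo "table $table not exists at last check"
      return 1
  }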
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=05d278fc-3f0f-438a-84ff-f933bb5b5302 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-nrwqc GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk pingcap_tiflow_pull_cdc_integration_test_1781-nrwqc GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G18 Run cases: availability http_proxies sequence PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=39c89193-8daa-44ff-ba9b-a133396e56a8 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts 
FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G18 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-3nzd5 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-3nzd5 pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/availability/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... 
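Further down, every "Verifying Upstream TiDB is started..." line is followed by one or more "ERROR 2003 (HY000)" messages: the scripts poll the TiDB port with the mysql client and treat ERROR 2003 as "not listening yet". A small sketch of that readiness wait, with the port and retry budget assumed rather than taken from the real scripts:

  # Sketch of the readiness poll behind "Verifying Upstream TiDB is started..." and the
  # repeated ERROR 2003 lines below; port 4000 and the retry budget are assumptions.
  wait_for_tidb() {
      local port=${1:-4000}
      echo "Verifying Upstream TiDB is started..."
      for _ in $(seq 1 60); do
          if mysql -h 127.0.0.1 -P "$port" -u root -e 'SELECT 1' >/dev/null 2>&1; then
              return 0
          fi
          sleep 2    # ERROR 2003 here just means the server is not accepting connections yet
      done
      return 1
  }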
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G10 Run cases: default_value simple cdc_server_tips event_filter sql_mode PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=3ac76f82-a411-41f3-9ecd-f3ee947448f9 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G10 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-56k4h GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc pingcap_tiflow_pull_cdc_integration_test_1781-56k4h GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: mysql... <<================= [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G20 Run cases: tidb_mysql_test ddl_reentrant multi_cdc_cluster PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=013cf4bf-b712-4a55-8e8d-fa732c827cb2 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net 
JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G20 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-8z2x2 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-8z2x2 pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/tidb_mysql_test/run.sh using Sink-Type: mysql... 
<<================= [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=598c9189-33b6-4ebe-818c-79fdddb603d7 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 
JOB_BASE_NAME=pull_cdc_integration_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-n5dsj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap_tiflow_pull_cdc_integration_test_1781-n5dsj pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: mysql... <<================= The 1 times to try to start tidb cluster... table sink_retry.finish_mark_2 not exists for 60-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/tiflash Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/changefeed_fast_fail Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/bank Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table sink_retry.finish_mark_2 not exists at last check start tidb cluster in /tmp/tidb_cdc_test/owner_resign Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/availability Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Verifying downstream PD is started... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... The 1 times to try to start tidb cluster... Verifying downstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/clustered_index Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/default_value Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release cdc.test cli capture list --server http://127.0.0.1:8301 2>&1 |grep c9a1d78a-9a69-40d5-a1f8-02ad005b5927 -A1 | grep '"is-owner": true' run task failed 17-th time, retry later Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/tidb_mysql_test Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_test-1781/tiflow-cdc) 3730180608 bytes in 7.12 secs (523998907 bytes/sec) [Pipeline] { ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] sh ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh mysql G11 Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=c5f397d0-979b-4c56-b557-b8aa8ffcd1aa JENKINS_NODE_COOKIE=0ce1efc1-6bd2-4e24-9044-272e0fc75e25 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/ GOLANG_VERSION=1.21.6 HOSTNAME=pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_test-1781 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_test","buildid":"1787366530929922049","prowjobid":"c5f397d0-979b-4c56-b557-b8aa8ffcd1aa","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10953,"author":"hicqu","sha":"bd37097adb9743a2e37a9b3c084c776608beee5d","title":"Delete range less","link":"https://github.com/pingcap/tiflow/pull/10953","commit_link":"https://github.com/pingcap/tiflow/pull/10953/commits/bd37097adb9743a2e37a9b3c084c776608beee5d","author_link":"https://github.com/hicqu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1781 TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1787366530929922049 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=3f934f40ac360b9c01f616a9aa1796d227d8b0328bf64cb045c7b8c4ee9caea4 JOB_BASE_NAME=pull_cdc_integration_test 
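Editor's note: the shell trace at the start of the line above shows how this worker kicks off its share of the suite: it wipes /tmp/tidb_cdc_test and invokes tests/integration_tests/run_group.sh with the sink type and group name, and group G11 expands to the cases resolve_lock, move_table, autorandom and generate_column. Reproducing that step outside Jenkins would look roughly like this; the checkout path is an assumption.

# Re-run test group G11 against the MySQL sink, as the CI step traced above does
# (sketch; ~/tiflow is an assumed checkout location).
cd ~/tiflow
rm -rf /tmp/tidb_cdc_test
mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh mysql G11   # runs resolve_lock move_table autorandom generate_column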
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/1781/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_test_1781-t2117 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-abe98160c26b91d7ad4012fc504dcc1b24097e77dc15bbc9f9f12631da13fdbf NODE_LABELS=pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 pingcap_tiflow_pull_cdc_integration_test_1781-t2117 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_test/display/redirect BUILD_NUMBER=1781 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.6.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: mysql... <<================= ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328445a40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv, pid:1172, start at 2024-05-06 14:39:35.309102156 +0800 CST m=+5.183899999 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.315 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.273 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.273 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328445a40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv, pid:1172, start at 2024-05-06 14:39:35.309102156 +0800 CST m=+5.183899999 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.315 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.273 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.273 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328448480014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n74j6-crm2g-dv5kv, pid:1241, start at 2024-05-06 14:39:35.464141722 +0800 CST m=+5.283372778 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.473 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.442 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.442 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
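Editor's note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above and below are dumps of the mysql.tidb table, where TiDB keeps its bootstrap and GC bookkeeping (tikv_gc_leader_uuid, tikv_gc_life_time, tikv_gc_safe_point and so on); the harness prints it to confirm that each upstream and downstream TiDB has finished bootstrapping. The same output can be reproduced with a plain query; the address below assumes a default upstream TiDB port and is not taken from the scripts.

# Dump the bootstrap/GC bookkeeping shown in the log
# (sketch; 127.0.0.1:4000 is an assumed upstream TiDB address).
mysql -h 127.0.0.1 -P 4000 -u root \
    -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'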
TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. 
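Editor's note: the "arg matches is ArgMatches { ... }" dumps are the TiFlash proxy echoing back its parsed command line. Reassembled into a single invocation, the proxy for the tiflash case appears to have been started roughly as sketched below. This is a reconstruction from the flags and values visible in that dump only; the binary name is an assumption, since the log does not show how the proxy is actually launched.

# Reconstructed from the ArgMatches dump above (sketch; the "tiflash-proxy"
# binary name is an assumption).
tiflash-proxy \
    --engine-addr 127.0.0.1:9500 \
    --advertise-addr 127.0.0.1:9000 \
    --addr 127.0.0.1:9000 \
    --data-dir /tmp/tidb_cdc_test/tiflash/tiflash/db/proxy \
    --config /tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml \
    --engine-git-hash 8e50de84e6d6ecdcc108990217b70b6bb3f50271 \
    --engine-label tiflash \
    --engine-version v8.2.0-alpha-17-g8e50de84e \
    --pd-endpoints 127.0.0.1:2379 \
    --log-file /tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log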
Do not delete. tikv_gc_leader_uuid 63d32844fa40007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq, pid:1179, start at 2024-05-06 14:39:35.918671598 +0800 CST m=+5.232071059 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.925 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.913 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.913 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32844fa40007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq, pid:1179, start at 2024-05-06 14:39:35.918671598 +0800 CST m=+5.232071059 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.925 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.913 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.913 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328450540010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-s7cvf-4wl4m-m9qkq, pid:1271, start at 2024-05-06 14:39:35.976427374 +0800 CST m=+5.207499738 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.982 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.957 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.957 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3284503c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv, pid:1235, start at 2024-05-06 14:39:35.973390348 +0800 CST m=+5.122974930 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.981 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.951 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.951 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3284503c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv, pid:1235, start at 2024-05-06 14:39:35.973390348 +0800 CST m=+5.122974930 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:35.981 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:35.951 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:35.951 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328452880015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-1tgp6-49vl6-m5pxv, pid:1316, start at 2024-05-06 14:39:36.13734612 +0800 CST m=+5.211480421 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:36.143 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:36.148 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:36.148 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/bank/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/bank/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/bank/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/bank/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/bank/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32843db80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc, pid:1291, start at 2024-05-06 14:39:34.806054335 +0800 CST m=+5.296937523 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:34.813 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:34.816 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:34.816 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32843db80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc, pid:1291, start at 2024-05-06 14:39:34.806054335 +0800 CST m=+5.296937523 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:34.813 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:34.816 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:34.816 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32843ffc000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-rnlk7-82rzg-kpvxc, pid:1371, start at 2024-05-06 14:39:34.927105507 +0800 CST m=+5.342370448 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:34.934 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:34.911 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:34.911 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/changefeed_fast_fail/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_fast_fail/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_fast_fail/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_fast_fail/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_fast_fail/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2599.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] archiveArtifacts Archiving artifacts [Mon May 6 14:39:38 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.26312633.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328470e40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3, pid:1294, start at 2024-05-06 14:39:38.074907905 +0800 CST m=+5.162532806 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:38.082 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:38.041 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:38.041 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328470e40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3, pid:1294, start at 2024-05-06 14:39:38.074907905 +0800 CST m=+5.162532806 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:38.082 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:38.041 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:38.041 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328472600012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-6fqqh-xqk93-0v7n3, pid:1377, start at 2024-05-06 14:39:38.155744866 +0800 CST m=+5.188145980 Host name and pid of current GC leader. 
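Editor's note: the traced commands around the "START cdc server in changefeed_finish case" marker show the harness's pattern for bringing a TiCDC server up: launch the instrumented cdc.test binary in server mode in the background, then poll http://127.0.0.1:8300/debug/info with the ticdc:ticdc_secret basic-auth pair until the response contains "etcd info", grepping each response for "failed to get info:" as well, with up to 50 attempts and a 3-second pause between them. A condensed sketch using only commands visible in the trace (the -test.coverprofile flag from the trace is omitted here for brevity):

# Start a TiCDC server and wait until /debug/info reports "etcd info"
# (sketch of the loop traced above; paths mirror the changefeed_finish case).
cdc.test server \
    --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug \
    --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data \
    --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &

for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    # the harness also greps each response for 'failed to get info:'
    if echo "$res" | grep -q 'etcd info'; then
        echo 'cdc server is ready'
        break
    fi
    if [ "$i" -eq 50 ]; then
        echo 'cdc server did not come up' >&2
        exit 1
    fi
    sleep 3
done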
(DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:38.161 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:38.136 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:38.136 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Recording fingerprints ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_fast_fail.cli.2619.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d32847d780014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f, pid:1307, start at 2024-05-06 14:39:38.876587939 +0800 CST m=+5.091719122 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:38.882 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:38.846 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:38.846 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32847d780014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f, pid:1307, start at 2024-05-06 14:39:38.876587939 +0800 CST m=+5.091719122 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:38.882 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:38.846 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:38.846 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328480940004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-krg3n-w16k0-gw81f, pid:1386, start at 2024-05-06 14:39:39.047525073 +0800 CST m=+5.206327001 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:39.053 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:39.045 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:39.045 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449571082409869313 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571082409869313 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Mon May 6 14:39:40 CST 2024] <<<<<< START cdc server in tiflash case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.26372639.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
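Editor's note: the tiflash case above also shows how each test obtains its start TSO: cdc cli tso query --pd=... prints the TSO followed by the coverage summary of the instrumented binary, and the script keeps only the first field with awk. A sketch of that step in isolation; the start_ts variable name is mine, and the coverage-file path is simplified (the real one embeds a PID).

# Fetch a TSO from PD and strip the trailing coverage chatter, as the trace above does
# (sketch; variable name and coverage-file path are illustrative).
start_ts=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.out \
    cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' '{print $1}')
echo "using start ts: $start_ts"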
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328466900013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk, pid:1244, start at 2024-05-06 14:39:37.417487987 +0800 CST m=+5.338649722 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.424 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.380 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.380 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328466900013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk, pid:1244, start at 2024-05-06 14:39:37.417487987 +0800 CST m=+5.338649722 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.424 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.380 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.380 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328465bc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-nrwqc-kcfgd-z9rtk, pid:1309, start at 2024-05-06 14:39:37.364697204 +0800 CST m=+5.220485327 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.374 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.377 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.377 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/owner_resign/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/owner_resign/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/owner_resign/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/owner_resign/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/owner_resign/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Mon May 6 14:39:39 CST 2024] <<<<<< START cdc server in bank case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bank.26472649.out server --log-file /tmp/tidb_cdc_test/bank/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/bank/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 The 1 times to try to start tidb cluster... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846cfc0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm, pid:1235, start at 2024-05-06 14:39:37.813433009 +0800 CST m=+5.384477231 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.820 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.791 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.791 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846cfc0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm, pid:1235, start at 2024-05-06 14:39:37.813433009 +0800 CST m=+5.384477231 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.820 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.791 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.791 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846e780010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-n5dsj-0rnw1-g38pm, pid:1316, start at 2024-05-06 14:39:37.909539861 +0800 CST m=+5.422693163 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.917 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.886 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.886 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/clustered_index/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/clustered_index/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/clustered_index/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/clustered_index/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/clustered_index/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846d2c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt, pid:1314, start at 2024-05-06 14:39:37.822833487 +0800 CST m=+5.228576962 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.829 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.803 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.803 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846d2c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt, pid:1314, start at 2024-05-06 14:39:37.822833487 +0800 CST m=+5.228576962 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.829 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.803 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.803 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d32846d140014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-3nzd5-phv98-9ktwt, pid:1405, start at 2024-05-06 14:39:37.818010047 +0800 CST m=+5.152972962 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:37.824 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:37.797 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:37.797 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
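Note: the repeated VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above come from polling the mysql.tidb system table until the upstream and downstream TiDB instances report themselves bootstrapped (the ERROR 2003 lines are simply probes that ran before the server was listening). The following is a minimal sketch of that kind of readiness probe, not the harness's actual helper; the function name, the retry budget and the default port 4000 are assumptions for illustration, while the queried table and columns are exactly the ones printed in this log.

wait_for_tidb() {   # hypothetical helper, mirrors the checks logged above
    local host=${1:-127.0.0.1} port=${2:-4000}
    for i in $(seq 1 60); do
        # mysql.tidb holds bootstrapped, tidb_server_version, tikv_gc_* etc.
        if mysql -h "$host" -P "$port" -u root -e \
            'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb' >/dev/null 2>&1; then
            return 0    # query succeeded: TiDB is up and bootstrapped
        fi
        echo "TiDB at $host:$port not ready yet ($i), retrying..."
        sleep 1
    done
    return 1
}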
Logging trace to /tmp/tidb_cdc_test/availability/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/availability/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/availability/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/availability/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/availability/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328495180015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc, pid:1308, start at 2024-05-06 14:39:40.397669223 +0800 CST m=+5.394144224 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:40.404 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:40.408 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:40.408 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449571082775035905 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571082775035905 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2686.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:42 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e9479133-dcfe-4d53-ab7f-84f0e65872e6 {"id":"e9479133-dcfe-4d53-ab7f-84f0e65872e6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977579} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e982c6 e9479133-dcfe-4d53-ab7f-84f0e65872e6 /tidb/cdc/default/default/upstream/7365772555675267734 {"id":7365772555675267734,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e9479133-dcfe-4d53-ab7f-84f0e65872e6 {"id":"e9479133-dcfe-4d53-ab7f-84f0e65872e6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977579} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e982c6 e9479133-dcfe-4d53-ab7f-84f0e65872e6 /tidb/cdc/default/default/upstream/7365772555675267734 {"id":7365772555675267734,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e9479133-dcfe-4d53-ab7f-84f0e65872e6 {"id":"e9479133-dcfe-4d53-ab7f-84f0e65872e6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977579} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e982c6 e9479133-dcfe-4d53-ab7f-84f0e65872e6 /tidb/cdc/default/default/upstream/7365772555675267734 {"id":7365772555675267734,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Mon May 6 14:39:42 CST 2024] <<<<<< START cdc server in changefeed_fast_fail case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedFastFailError=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_fast_fail.26672669.out server --log-file /tmp/tidb_cdc_test/changefeed_fast_fail/cdc.log --log-level debug --data-dir 
/tmp/tidb_cdc_test/changefeed_fast_fail/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Mon May 6 14:39:42 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.26662668.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later [Mon May 6 14:39:42 CST 2024] <<<<<< START cdc server in owner_resign case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.owner_resign.26112613.out server --log-file /tmp/tidb_cdc_test/owner_resign/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/owner_resign/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:42 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04f2fd30-e597-4dc5-838a-dea73a6bf544 {"id":"04f2fd30-e597-4dc5-838a-dea73a6bf544","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ec31d9 04f2fd30-e597-4dc5-838a-dea73a6bf544 /tidb/cdc/default/default/upstream/7365772566596270811 {"id":7365772566596270811,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04f2fd30-e597-4dc5-838a-dea73a6bf544 {"id":"04f2fd30-e597-4dc5-838a-dea73a6bf544","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ec31d9 04f2fd30-e597-4dc5-838a-dea73a6bf544 /tidb/cdc/default/default/upstream/7365772566596270811 {"id":7365772566596270811,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/04f2fd30-e597-4dc5-838a-dea73a6bf544 {"id":"04f2fd30-e597-4dc5-838a-dea73a6bf544","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ec31d9 04f2fd30-e597-4dc5-838a-dea73a6bf544 /tidb/cdc/default/default/upstream/7365772566596270811 {"id":7365772566596270811,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.bank.cli.2707.out cli changefeed create --sink-uri=mysql://root@127.0.0.1:3306/ ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } Cache not saved (inner-step execution failed) [Mon May 6 14:39:42 CST 2024] <<<<<< START cdc server in clustered_index case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test 
-test.coverprofile=/tmp/tidb_cdc_test/cov.clustered_index.26672669.out server --log-file /tmp/tidb_cdc_test/clustered_index/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/clustered_index/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328495180015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc, pid:1308, start at 2024-05-06 14:39:40.397669223 +0800 CST m=+5.394144224 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:40.404 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:40.408 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:40.408 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d328495dc0006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-56k4h-xtn1f-zd3nc, pid:1386, start at 2024-05-06 14:39:40.413221821 +0800 CST m=+5.330205769 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:40.420 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:40.407 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:40.407 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
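Note: every "<<<<<< START cdc server in ... case >>>>>>" block above follows the same pattern: optionally set GO_FAILPOINTS (for example the github.com/pingcap/tiflow/cdc/owner/InjectChangefeedFastFailError failpoint in the changefeed_fast_fail case), launch the coverage-instrumented cdc.test binary as a server, then poll /debug/info with the ticdc:ticdc_secret basic-auth credentials until the response contains "etcd info". The sketch below condenses that pattern using only flags and values that appear in this log; the function name is illustrative rather than the harness's real helper.

start_cdc_and_wait() {   # illustrative only; flags and retry budget taken from the log above
    local workdir=$1 addr=${2:-127.0.0.1:8300}
    GO_FAILPOINTS="${GO_FAILPOINTS:-}" cdc.test \
        -test.coverprofile="$workdir/cov.out" server \
        --log-file "$workdir/cdc.log" --log-level debug \
        --data-dir "$workdir/cdc_data" --cluster-id default \
        --addr "$addr" --pd http://127.0.0.1:2379 &
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "http://$addr/debug/info" --user ticdc:ticdc_secret || true)
        if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
            return 0    # the capture has registered itself in etcd, server is ready
        fi
        [ "$i" -eq 50 ] && { echo "cdc server at $addr did not become ready"; return 1; }
        sleep 3
    done
}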
TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.availability.cli.2671.out cli tso query --pd=http://127.0.0.1:2379 Create changefeed successfully! 
ID: 93c19474-a9d0-4ca2-ae6b-73f0c0d842aa Info: {"upstream_id":7365772566596270811,"namespace":"default","id":"93c19474-a9d0-4ca2-ae6b-73f0c0d842aa","sink_uri":"mysql://root@127.0.0.1:3306/","create_time":"2024-05-06T14:39:43.31677052+08:00","start_ts":449571083570380804,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571083570380804,"checkpoint_ts":449571083570380804,"checkpoint_time":"2024-05-06 14:39:43.200"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:43 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8bb812f2-6822-4c4a-b5aa-4305b2fe477c {"id":"8bb812f2-6822-4c4a-b5aa-4305b2fe477c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e734d2 8bb812f2-6822-4c4a-b5aa-4305b2fe477c /tidb/cdc/default/default/upstream/7365772558885314804 {"id":7365772558885314804,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8bb812f2-6822-4c4a-b5aa-4305b2fe477c {"id":"8bb812f2-6822-4c4a-b5aa-4305b2fe477c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e734d2 8bb812f2-6822-4c4a-b5aa-4305b2fe477c /tidb/cdc/default/default/upstream/7365772558885314804 {"id":7365772558885314804,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8bb812f2-6822-4c4a-b5aa-4305b2fe477c {"id":"8bb812f2-6822-4c4a-b5aa-4305b2fe477c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977580} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0e734d2 8bb812f2-6822-4c4a-b5aa-4305b2fe477c /tidb/cdc/default/default/upstream/7365772558885314804 {"id":7365772558885314804,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 1452267c-624d-4202-b3a8-694716f00bfb Info: {"upstream_id":7365772558885314804,"namespace":"default","id":"1452267c-624d-4202-b3a8-694716f00bfb","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:43.550861452+08:00","start_ts":449571082409869313,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571082409869313,"checkpoint_ts":449571082409869313,"checkpoint_time":"2024-05-06 14:39:38.773"} start tidb cluster in /tmp/tidb_cdc_test/resolve_lock Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + set +x + tso='449571083359879169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571083359879169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Mon May 6 14:39:43 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.27232725.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + tso='449571083584274433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571083584274433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + set +x + GO111MODULE=on + go run bank.go case.go -u 'root@tcp(127.0.0.1:4000)/bank' -d 'root@tcp(127.0.0.1:3306)/bank' --test-round=20000 -a 127.0.0.1:20080 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/spf13/pflag v1.0.5 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3284c9a00017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6, pid:1366, start at 2024-05-06 14:39:43.767953244 +0800 CST m=+5.152505118 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:43.774 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:43.770 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:43.770 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3284c9a00017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6, pid:1366, start at 2024-05-06 14:39:43.767953244 +0800 CST m=+5.152505118 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:43.774 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240506-14:39:43.770 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:43.770 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d3284cb240013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-test-1781-8z2x2-884kr-cwzh6, pid:1452, start at 2024-05-06 14:39:43.847459149 +0800 CST m=+5.150647060 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240506-14:41:43.853 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240506-14:39:43.817 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240506-14:29:43.817 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
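Note: once a cluster is up, each case above obtains a start timestamp with "cli tso query" (the trailing "PASS coverage: ..." line comes from the instrumented binary, which is why the log pipes the output through awk to keep only the first field) and then creates a changefeed pointing at the downstream MySQL/TiDB on port 3306. A condensed sketch of that two-step sequence, built only from commands visible in this log, is shown here; the coverage-profile path and the changefeed ID are placeholders.

# Step 1: query a TSO from PD and strip the coverage summary appended by cdc.test.
start_ts=$(cdc.test -test.coverprofile=/tmp/cov.cli.out cli tso query \
    --pd=http://127.0.0.1:2379 | awk '{print $1}' | head -n1)

# Step 2: create a changefeed from that TSO against the downstream sink.
cdc.test -test.coverprofile=/tmp/cov.cli.out cli changefeed create \
    --start-ts="$start_ts" \
    --sink-uri='mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1' \
    -c example-changefeed    # placeholder ID; the log uses e.g. changefeed-fast-fail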
Logging trace to /tmp/tidb_cdc_test/tidb_mysql_test/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tidb_mysql_test/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tidb_mysql_test/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tidb_mysql_test/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tidb_mysql_test/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } go: downloading go.uber.org/multierr v1.11.0 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:45 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d8fdc01-64a3-4d61-826e-a0a804dc4a9e {"id":"7d8fdc01-64a3-4d61-826e-a0a804dc4a9e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f236ce 7d8fdc01-64a3-4d61-826e-a0a804dc4a9e /tidb/cdc/default/default/upstream/7365772568963479227 {"id":7365772568963479227,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d8fdc01-64a3-4d61-826e-a0a804dc4a9e {"id":"7d8fdc01-64a3-4d61-826e-a0a804dc4a9e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f236ce 7d8fdc01-64a3-4d61-826e-a0a804dc4a9e /tidb/cdc/default/default/upstream/7365772568963479227 {"id":7365772568963479227,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to 
get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d8fdc01-64a3-4d61-826e-a0a804dc4a9e {"id":"7d8fdc01-64a3-4d61-826e-a0a804dc4a9e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f236ce 7d8fdc01-64a3-4d61-826e-a0a804dc4a9e /tidb/cdc/default/default/upstream/7365772568963479227 {"id":7365772568963479227,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check diff failed 2-th time, retry later go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda Create changefeed successfully! ID: d7e30ef6-ffe3-4dc4-b822-e15917e364a4 Info: {"upstream_id":7365772568963479227,"namespace":"default","id":"d7e30ef6-ffe3-4dc4-b822-e15917e364a4","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/?safe-mode=true","create_time":"2024-05-06T14:39:45.277940325+08:00","start_ts":449571083240079364,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571083240079364,"checkpoint_ts":449571083240079364,"checkpoint_time":"2024-05-06 14:39:41.940"} + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:45 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f5a9919-567c-47f8-bc3e-2d76a9b7b676 {"id":"4f5a9919-567c-47f8-bc3e-2d76a9b7b676","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0eae4e4 4f5a9919-567c-47f8-bc3e-2d76a9b7b676 /tidb/cdc/default/default/upstream/7365772561768605054 {"id":7365772561768605054,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f5a9919-567c-47f8-bc3e-2d76a9b7b676 {"id":"4f5a9919-567c-47f8-bc3e-2d76a9b7b676","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0eae4e4 4f5a9919-567c-47f8-bc3e-2d76a9b7b676 /tidb/cdc/default/default/upstream/7365772561768605054 {"id":7365772561768605054,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f5a9919-567c-47f8-bc3e-2d76a9b7b676 {"id":"4f5a9919-567c-47f8-bc3e-2d76a9b7b676","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0eae4e4 4f5a9919-567c-47f8-bc3e-2d76a9b7b676 /tidb/cdc/default/default/upstream/7365772561768605054 {"id":7365772561768605054,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_fast_fail.cli.2728.out cli changefeed create --start-ts=449571082775035905 
'--sink-uri=mysql://normal:123456@127.0.0.1:3306/?max-txn-row=1' -c changefeed-fast-fail < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:45 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 {"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 {"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 {"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc cli capture list --server http://127.0.0.1:8301 |jq '.|length'|grep -E '^1$' 1 run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.owner_resign.cli.2715.out cli changefeed create --sink-uri=mysql://normal:123456@127.0.0.1:3306/ --server=127.0.0.1:8301 Create changefeed successfully! 
ID: changefeed-fast-fail Info: {"upstream_id":7365772561768605054,"namespace":"default","id":"changefeed-fast-fail","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-06T14:39:45.673367182+08:00","start_ts":449571082775035905,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571082775035905,"checkpoint_ts":449571082775035905,"checkpoint_time":"2024-05-06 14:39:40.166"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2688.out cli tso query --pd=http://127.0.0.1:2379 Create changefeed successfully! 
ID: e32e1a9f-58a1-4064-a96c-40dda6a2c285 Info: {"upstream_id":7365772570949550539,"namespace":"default","id":"e32e1a9f-58a1-4064-a96c-40dda6a2c285","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:46.072556911+08:00","start_ts":449571084286296069,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571084286296069,"checkpoint_ts":449571084286296069,"checkpoint_time":"2024-05-06 14:39:45.931"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:45 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/afd173f7-ec44-4555-b1f5-11fd051f0b32 {"id":"afd173f7-ec44-4555-b1f5-11fd051f0b32","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977583} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f268cd afd173f7-ec44-4555-b1f5-11fd051f0b32 /tidb/cdc/default/default/upstream/7365772576314735485 {"id":7365772576314735485,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/afd173f7-ec44-4555-b1f5-11fd051f0b32 {"id":"afd173f7-ec44-4555-b1f5-11fd051f0b32","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977583} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f268cd afd173f7-ec44-4555-b1f5-11fd051f0b32 /tidb/cdc/default/default/upstream/7365772576314735485 {"id":7365772576314735485,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/afd173f7-ec44-4555-b1f5-11fd051f0b32 {"id":"afd173f7-ec44-4555-b1f5-11fd051f0b32","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977583} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0f268cd afd173f7-ec44-4555-b1f5-11fd051f0b32 /tidb/cdc/default/default/upstream/7365772576314735485 {"id":7365772576314735485,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: ee2d2db8-4b42-4be9-b69a-0827d72695f5 Info: {"upstream_id":7365772576314735485,"namespace":"default","id":"ee2d2db8-4b42-4be9-b69a-0827d72695f5","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:46.113748328+08:00","start_ts":449571083463163909,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571083463163909,"checkpoint_ts":449571083463163909,"checkpoint_time":"2024-05-06 14:39:42.791"} run test case test_kill_owner [Mon May 6 14:39:46 CST 2024] <<<<<< START cdc server in availability case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.availability.27382740.out server --log-file /tmp/tidb_cdc_test/availability/cdctest_kill_owner.server1.log --log-level debug --data-dir /tmp/tidb_cdc_test/availability/cdc_datatest_kill_owner.server1 --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table new_ci_collation_test.t1 not exists for 1-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tidb_mysql_test.cli.2710.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:46 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d29ee0e-871c-4611-ad3a-55eca08c52d0 {"id":"3d29ee0e-871c-4611-ad3a-55eca08c52d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977584} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fd90ce 3d29ee0e-871c-4611-ad3a-55eca08c52d0 /tidb/cdc/default/default/upstream/7365772577573498494 {"id":7365772577573498494,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d29ee0e-871c-4611-ad3a-55eca08c52d0 {"id":"3d29ee0e-871c-4611-ad3a-55eca08c52d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977584} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fd90ce 3d29ee0e-871c-4611-ad3a-55eca08c52d0 /tidb/cdc/default/default/upstream/7365772577573498494 {"id":7365772577573498494,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d29ee0e-871c-4611-ad3a-55eca08c52d0 {"id":"3d29ee0e-871c-4611-ad3a-55eca08c52d0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977584} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fd90ce 3d29ee0e-871c-4611-ad3a-55eca08c52d0 /tidb/cdc/default/default/upstream/7365772577573498494 {"id":7365772577573498494,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2782.out cli changefeed create --start-ts=449571083359879169 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ check diff successfully + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-fast-fail failed ErrStartTsBeforeGC + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-fast-fail + expected_state=failed + error_msg=ErrStartTsBeforeGC + tls_dir=ErrStartTsBeforeGC + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-fast-fail -s + set +x + tso='449571084214730753 PASS coverage: 1.8% of statements in 
github.com/pingcap/tiflow/...' + echo 449571084214730753 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Mon May 6 14:39:47 CST 2024] <<<<<< START cdc server in default_value case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.27202722.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table cdc_tiflash_test.multi_data_type exists check diff failed 1-th time, retry later Create changefeed successfully! ID: f8fafe38-26ab-499e-a313-9dfe21822371 Info: {"upstream_id":7365772577573498494,"namespace":"default","id":"f8fafe38-26ab-499e-a313-9dfe21822371","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:47.406700097+08:00","start_ts":449571083359879169,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571083359879169,"checkpoint_ts":449571083359879169,"checkpoint_time":"2024-05-06 14:39:42.397"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
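The curl/grep loops traced before and after this point are the server readiness probe the test scripts run after every `cdc.test ... server` launch: poll /debug/info with HTTP basic auth until the response contains 'etcd info' (meaning the capture has registered itself in etcd), treating 'Connection refused' or an empty body as "still starting" and retrying up to 50 times with a 3-second sleep. A minimal standalone sketch of that loop, reusing the endpoint, ticdc:ticdc_secret credentials, and retry budget shown in the traces (wait_cdc_ready is an illustrative name, not the exact helper in the test suite):

#!/bin/bash
# Poll a TiCDC server's /debug/info endpoint until it reports "etcd info".
# Endpoint, credentials, retry count, and sleep interval mirror the traced loop;
# wait_cdc_ready is an illustrative name, not the real helper function.
wait_cdc_ready() {
    local addr="${1:-127.0.0.1:8300}"
    local res i
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>&1)
        if echo "$res" | grep -q 'etcd info'; then
            return 0    # capture is registered in etcd; the server is ready
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server at ${addr} not ready after 50 attempts" >&2
            return 1
        fi
        sleep 3
    done
}

wait_cdc_ready 127.0.0.1:8300

The traced loop additionally greps each response for the 'failed to get info:' marker (get_info_fail_msg in the script), presumably so a server that is up but erroring can be distinguished from one that has simply not started yet.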
+ info='{ "upstream_id": 7365772561768605054, "namespace": "default", "id": "changefeed-fast-fail", "state": "failed", "checkpoint_tso": 449571082775035905, "checkpoint_time": "2024-05-06 14:39:40.166", "error": { "time": "2024-05-06T14:39:45.684325499+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError" } }' + echo '{ "upstream_id": 7365772561768605054, "namespace": "default", "id": "changefeed-fast-fail", "state": "failed", "checkpoint_tso": 449571082775035905, "checkpoint_time": "2024-05-06 14:39:40.166", "error": { "time": "2024-05-06T14:39:45.684325499+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError" } }' { "upstream_id": 7365772561768605054, "namespace": "default", "id": "changefeed-fast-fail", "state": "failed", "checkpoint_tso": 449571082775035905, "checkpoint_time": "2024-05-06 14:39:40.166", "error": { "time": "2024-05-06T14:39:45.684325499+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365772561768605054, '"namespace":' '"default",' '"id":' '"changefeed-fast-fail",' '"state":' '"failed",' '"checkpoint_tso":' 449571082775035905, '"checkpoint_time":' '"2024-05-06' '14:39:40.166",' '"error":' '{' '"time":' '"2024-05-06T14:39:45.684325499+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError"' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365772561768605054, '"namespace":' '"default",' '"id":' '"changefeed-fast-fail",' '"state":' '"failed",' '"checkpoint_tso":' 449571082775035905, '"checkpoint_time":' '"2024-05-06' '14:39:40.166",' '"error":' '{' '"time":' '"2024-05-06T14:39:45.684325499+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError"' '}' '}' + message='[CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError' + [[ ! [CDC:ErrStartTsBeforeGC]InjectChangefeedFastFailError =~ ErrStartTsBeforeGC ]] run task successfully + set +x Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 1-th time... + set +x + tso='449571084637306881 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449571084637306881 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Mon May 6 14:39:48 CST 2024] <<<<<< START cdc server in tidb_mysql_test case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tidb_mysql_test.27502752.out server --log-file /tmp/tidb_cdc_test/tidb_mysql_test/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tidb_mysql_test/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x cdc cli processor list --server http://127.0.0.1:8301 |jq '.|length'|grep -E '^1$' 1 run task successfully table owner_resign.t1 exists check diff failed 1-th time, retry later wait process cdc.test exit for 2-th time... go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 table new_ci_collation_test.t1 exists table new_ci_collation_test.t2 not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda Starting Downstream TiDB... Release Version: v8.2.0-alpha-80-g06ee59bd9c Edition: Community Git Commit Hash: 06ee59bd9c683757f75fdd3469f37f50988a1a2f Git Branch: master UTC Build Time: 2024-05-06 03:43:22 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Mon May 6 14:39:49 CST 2024] <<<<<< run test case changefeed_fast_fail success! 
>>>>>> [Pipeline] } Cache not saved (inner-step execution failed) go: downloading golang.org/x/text v0.14.0 check diff successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:49 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/deca5bb2-77c6-4f3e-8430-487606671d59 {"id":"deca5bb2-77c6-4f3e-8430-487606671d59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ef2303 deca5bb2-77c6-4f3e-8430-487606671d59 /tidb/cdc/default/default/upstream/7365772574253934918 {"id":7365772574253934918,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/deca5bb2-77c6-4f3e-8430-487606671d59 {"id":"deca5bb2-77c6-4f3e-8430-487606671d59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ef2303 deca5bb2-77c6-4f3e-8430-487606671d59 /tidb/cdc/default/default/upstream/7365772574253934918 {"id":7365772574253934918,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/deca5bb2-77c6-4f3e-8430-487606671d59 {"id":"deca5bb2-77c6-4f3e-8430-487606671d59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ef2303 deca5bb2-77c6-4f3e-8430-487606671d59 /tidb/cdc/default/default/upstream/7365772574253934918 {"id":7365772574253934918,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.availability.cli.2791.out cli changefeed create --start-ts=449571083584274433 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ Create changefeed successfully! 
ID: 330bc13d-5fb3-48f1-9c33-e755133d4bd3 Info: {"upstream_id":7365772574253934918,"namespace":"default","id":"330bc13d-5fb3-48f1-9c33-e755133d4bd3","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:49.848574311+08:00","start_ts":449571083584274433,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571083584274433,"checkpoint_ts":449571083584274433,"checkpoint_time":"2024-05-06 14:39:43.253"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 1-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:50 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7c4e38ca-6265-4d52-811f-a32b1363e578 {"id":"7c4e38ca-6265-4d52-811f-a32b1363e578","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977587} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fc6fd6 7c4e38ca-6265-4d52-811f-a32b1363e578 /tidb/cdc/default/default/upstream/7365772584284884004 {"id":7365772584284884004,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7c4e38ca-6265-4d52-811f-a32b1363e578 {"id":"7c4e38ca-6265-4d52-811f-a32b1363e578","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977587} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fc6fd6 7c4e38ca-6265-4d52-811f-a32b1363e578 /tidb/cdc/default/default/upstream/7365772584284884004 {"id":7365772584284884004,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7c4e38ca-6265-4d52-811f-a32b1363e578 {"id":"7c4e38ca-6265-4d52-811f-a32b1363e578","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977587} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0fc6fd6 7c4e38ca-6265-4d52-811f-a32b1363e578 /tidb/cdc/default/default/upstream/7365772584284884004 {"id":7365772584284884004,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2787.out cli changefeed create --start-ts=449571084214730753 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ wait process cdc.test exit for 2-th time... Create changefeed successfully! 
ID: fde9607c-6392-49f8-9a4d-0ce7ecf6c37c Info: {"upstream_id":7365772584284884004,"namespace":"default","id":"fde9607c-6392-49f8-9a4d-0ce7ecf6c37c","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-06T14:39:50.685005206+08:00","start_ts":449571084214730753,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-28-gbd37097ad","resolved_ts":449571084214730753,"checkpoint_ts":449571084214730753,"checkpoint_time":"2024-05-06 14:39:45.658"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Mon May 6 14:39:51 CST 2024] <<<<<< run test case tiflash success! 
>>>>>> table new_ci_collation_test.t2 exists table new_ci_collation_test.t3 not exists for 1-th check, retry later go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/coocood/freecache v1.2.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/tidwall/btree v1.7.0 [Pipeline] } Cache not saved (inner-step execution failed) + set +x cdc.test cli capture list 2>&1 | grep '"is-owner": true' ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully [Mon May 6 14:39:50 CST 2024] <<<<<< START cdc server in owner_resign case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ChangefeedOwnerDontUpdateCheckpoint=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.owner_resign.28952897.out server --log-file /tmp/tidb_cdc_test/owner_resign/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/owner_resign/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading cloud.google.com/go v0.112.2 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/google/btree v1.1.2 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading golang.org/x/time v0.5.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/golang/snappy v0.0.4 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/google/pprof 
v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/mattn/go-runewidth v0.0.15 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:51 GMT < Content-Length: 809 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e62a25e8-3575-4ce5-aea9-fe285ce41595 {"id":"e62a25e8-3575-4ce5-aea9-fe285ce41595","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977589} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca106a5d0 e62a25e8-3575-4ce5-aea9-fe285ce41595 /tidb/cdc/default/default/upstream/7365772598263563707 {"id":7365772598263563707,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e62a25e8-3575-4ce5-aea9-fe285ce41595 {"id":"e62a25e8-3575-4ce5-aea9-fe285ce41595","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977589} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca106a5d0 e62a25e8-3575-4ce5-aea9-fe285ce41595 /tidb/cdc/default/default/upstream/7365772598263563707 {"id":7365772598263563707,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e62a25e8-3575-4ce5-aea9-fe285ce41595 
{"id":"e62a25e8-3575-4ce5-aea9-fe285ce41595","address":"127.0.0.1:8300","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977589} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca106a5d0 e62a25e8-3575-4ce5-aea9-fe285ce41595 /tidb/cdc/default/default/upstream/7365772598263563707 {"id":7365772598263563707,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 142 0 0 100 142 0 1577 --:--:-- --:--:-- --:--:-- 1595 mysql_test start go: downloading github.com/maxshuang/mysql-tester v0.0.2 + set +x go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading golang.org/x/sync v0.7.0 go: downloading golang.org/x/time v0.5.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 [Pipeline] } Cache not saved (inner-step execution failed) go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading 
github.com/go-logr/logr v1.4.1 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda table new_ci_collation_test.t3 exists table new_ci_collation_test.t4 not exists for 1-th check, retry later go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading golang.org/x/text v0.14.0 "is-owner": true, run task successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) Sending interrupt signal to process Killing processes + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Mon, 06 May 2024 06:39:53 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {UpstreamID:7365772570949550539 Namespace:default ID:e32e1a9f-58a1-4064-a96c-40dda6a2c285 SinkURI:mysql://normal:123456@127.0.0.1:3306/ CreateTime:2024-05-06 14:39:46.072556911 +0800 CST StartTs:449571084286296069 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0026522d0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-28-gbd37097ad Epoch:449571084312248327} {CheckpointTs:449571086173470735 MinTableBarrierTs:449571086173470735 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 {"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/capture/e7f6ac47-711e-4521-b624-8d5f11f03f26 {"id":"e7f6ac47-711e-4521-b624-8d5f11f03f26","address":"127.0.0.1:8302","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977591} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed1765 e7f6ac47-711e-4521-b624-8d5f11f03f26 /tidb/cdc/default/default/changefeed/info/e32e1a9f-58a1-4064-a96c-40dda6a2c285 
{"upstream-id":7365772570949550539,"namespace":"default","changefeed-id":"e32e1a9f-58a1-4064-a96c-40dda6a2c285","sink-uri":"mysql://normal:123456@127.0.0.1:3306/","create-time":"2024-05-06T14:39:46.072556911+08:00","start-ts":449571084286296069,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-28-gbd37097ad","epoch":449571084312248327} /tidb/cdc/default/default/changefeed/status/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":449571086173470735,"min-table-barrier-ts":449571086173470735,"admin-job-type":0} /tidb/cdc/default/default/task/position/3d2ac8c5-58c8-45cc-a581-f398815e8471/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/e7f6ac47-711e-4521-b624-8d5f11f03f26/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {UpstreamID:7365772570949550539 Namespace:default ID:e32e1a9f-58a1-4064-a96c-40dda6a2c285 SinkURI:mysql://normal:123456@127.0.0.1:3306/ CreateTime:2024-05-06 14:39:46.072556911 +0800 CST StartTs:449571084286296069 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0026522d0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-28-gbd37097ad Epoch:449571084312248327} {CheckpointTs:449571086173470735 MinTableBarrierTs:449571086173470735 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 
{"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/capture/e7f6ac47-711e-4521-b624-8d5f11f03f26 {"id":"e7f6ac47-711e-4521-b624-8d5f11f03f26","address":"127.0.0.1:8302","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977591} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed1765 e7f6ac47-711e-4521-b624-8d5f11f03f26 /tidb/cdc/default/default/changefeed/info/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"upstream-id":7365772570949550539,"namespace":"default","changefeed-id":"e32e1a9f-58a1-4064-a96c-40dda6a2c285","sink-uri":"mysql://normal:123456@127.0.0.1:3306/","create-time":"2024-05-06T14:39:46.072556911+08:00","start-ts":449571084286296069,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-28-gbd37097ad","epoch":449571084312248327} /tidb/cdc/default/default/changefeed/status/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":449571086173470735,"min-table-barrier-ts":449571086173470735,"admin-job-type":0} /tidb/cdc/default/default/task/position/3d2ac8c5-58c8-45cc-a581-f398815e8471/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/e7f6ac47-711e-4521-b624-8d5f11f03f26/e32e1a9f-58a1-4064-a96c-40dda6a2c285 
{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** processors info ***: changefeedID: default/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {UpstreamID:7365772570949550539 Namespace:default ID:e32e1a9f-58a1-4064-a96c-40dda6a2c285 SinkURI:mysql://normal:123456@127.0.0.1:3306/ CreateTime:2024-05-06 14:39:46.072556911 +0800 CST StartTs:449571084286296069 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0026522d0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-28-gbd37097ad Epoch:449571084312248327} {CheckpointTs:449571086173470735 MinTableBarrierTs:449571086173470735 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d2ac8c5-58c8-45cc-a581-f398815e8471 {"id":"3d2ac8c5-58c8-45cc-a581-f398815e8471","address":"127.0.0.1:8301","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977582} /tidb/cdc/default/__cdc_meta__/capture/e7f6ac47-711e-4521-b624-8d5f11f03f26 {"id":"e7f6ac47-711e-4521-b624-8d5f11f03f26","address":"127.0.0.1:8302","version":"v8.2.0-alpha-28-gbd37097ad","git-hash":"bd37097adb9743a2e37a9b3c084c776608beee5d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_test/tiflow/bin/cdc.test","start-timestamp":1714977591} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed16cd 3d2ac8c5-58c8-45cc-a581-f398815e8471 /tidb/cdc/default/__cdc_meta__/owner/22318f4ca0ed1765 e7f6ac47-711e-4521-b624-8d5f11f03f26 /tidb/cdc/default/default/changefeed/info/e32e1a9f-58a1-4064-a96c-40dda6a2c285 
{"upstream-id":7365772570949550539,"namespace":"default","changefeed-id":"e32e1a9f-58a1-4064-a96c-40dda6a2c285","sink-uri":"mysql://normal:123456@127.0.0.1:3306/","create-time":"2024-05-06T14:39:46.072556911+08:00","start-ts":449571084286296069,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-28-gbd37097ad","epoch":449571084312248327} /tidb/cdc/default/default/changefeed/status/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":449571086173470735,"min-table-barrier-ts":449571086173470735,"admin-job-type":0} /tidb/cdc/default/default/task/position/3d2ac8c5-58c8-45cc-a581-f398815e8471/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/e7f6ac47-711e-4521-b624-8d5f11f03f26/e32e1a9f-58a1-4064-a96c-40dda6a2c285 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365772570949550539 {"id":7365772570949550539,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x cdc cli capture list --server http://127.0.0.1:8302 |jq '.|length'|grep -E '^2$' kill finished with exit code 0 Sending interrupt signal to process Killing processes owner pid: 2743 owner id deca5bb2-77c6-4f3e-8430-487606671d59 [Mon May 6 14:39:54 CST 2024] <<<<<< START cdc server in availability case >>>>>> 2 run task successfully % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 kill finished with exit code 0 Sending interrupt signal to process Killing processes + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + 
GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.availability.29052907.out server --log-file /tmp/tidb_cdc_test/availability/cdctest_kill_owner.server2.log --log-level debug --data-dir /tmp/tidb_cdc_test/availability/cdc_datatest_kill_owner.server2 --cluster-id default --addr 127.0.0.1:8301 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 kill finished with exit code 0 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714977594.2972844,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0018f3500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/golang/snappy v0.0.4 go: downloading golang.org/x/tools 
v0.20.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 table new_ci_collation_test.t4 exists go: downloading google.golang.org/api v0.170.0 table new_ci_collation_test.t5 not exists for 1-th check, retry later {"level":"warn","ts":1714977594.8312957,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e8cc40/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 ++ stop_tidb_cluster {"level":"warn","ts":1714977594.0249803,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0022a0380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/tklauser/go-sysconf 
v0.3.12
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/kr/pretty v0.3.1
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/robfig/cron v1.2.0
VARIABLE_NAME  VARIABLE_VALUE  COMMENT
bootstrapped  True  Bootstrap flag. Do not delete.
tidb_server_version  196  Bootstrap version. Do not delete.
system_tz  Asia/Shanghai  TiDB Global System Timezone.
new_collation_enabled  True  If the new collations are enabled. Do not edit it.
ddl_table_version  3  DDL Table Version. Do not delete.
tikv_gc_leader_uuid  63d328570b40013  Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc  host:pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33, pid:1370, start at 2024-05-06 14:39:54.43698414 +0800 CST m=+5.128479206  Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease  20240506-14:41:54.446 +0800  Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency  true  Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable  true  Current GC enable status
tikv_gc_run_interval  10m0s  GC run interval, at least 10m, in Go format.
tikv_gc_life_time  10m0s  All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time  20240506-14:39:54.413 +0800  The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point  20240506-14:29:54.413 +0800  All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME  VARIABLE_VALUE  COMMENT
bootstrapped  True  Bootstrap flag. Do not delete.
tidb_server_version  196  Bootstrap version. Do not delete.
system_tz  Asia/Shanghai  TiDB Global System Timezone.
new_collation_enabled  True  If the new collations are enabled. Do not edit it.
ddl_table_version  3  DDL Table Version. Do not delete.
tikv_gc_leader_uuid  63d328570b40013  Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc  host:pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33, pid:1370, start at 2024-05-06 14:39:54.43698414 +0800 CST m=+5.128479206  Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease  20240506-14:41:54.446 +0800  Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency  true  Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable  true  Current GC enable status
tikv_gc_run_interval  10m0s  GC run interval, at least 10m, in Go format.
tikv_gc_life_time  10m0s  All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time  20240506-14:39:54.413 +0800  The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point  20240506-14:29:54.413 +0800  All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME  VARIABLE_VALUE  COMMENT
bootstrapped  True  Bootstrap flag. Do not delete.
tidb_server_version  196  Bootstrap version. Do not delete.
system_tz  Asia/Shanghai  TiDB Global System Timezone.
new_collation_enabled  True  If the new collations are enabled. Do not edit it.
ddl_table_version  3  DDL Table Version. Do not delete.
tikv_gc_leader_uuid  63d328572f40004  Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc  host:pingcap-tiflow-pull-cdc-integration-test-1781-t2117-klrn9-l5x33, pid:1454, start at 2024-05-06 14:39:54.560364619 +0800 CST m=+5.192375344  Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease  20240506-14:41:54.569 +0800  Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency  true  Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable  true  Current GC enable status
tikv_gc_run_interval  10m0s  GC run interval, at least 10m, in Go format.
tikv_gc_life_time  10m0s  All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time  20240506-14:39:54.557 +0800  The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point  20240506-14:29:54.557 +0800  All versions after safe point can be accessed. (DO NOT EDIT)
script returned exit code 143
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
Starting Upstream TiFlash...
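For reference, the VARIABLE_NAME / VARIABLE_VALUE rows printed above are the contents of TiDB's mysql.tidb system table, which the integration-test scripts poll to decide that an upstream or downstream TiDB instance has finished bootstrapping. A minimal bash sketch of that kind of check follows; it is not the harness's actual helper, and the host, port and user are assumptions for a local test cluster.

  # wait_tidb_bootstrapped is an illustrative name, not a function from the tiflow scripts.
  wait_tidb_bootstrapped() {
      local host=${1:-127.0.0.1} port=${2:-4000}
      for i in $(seq 1 60); do
          # mysql.tidb holds the bootstrapped flag and the tikv_gc_* variables shown above
          if mysql -h "$host" -P "$port" -u root -N \
               -e "SELECT VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME = 'bootstrapped';" 2>/dev/null | grep -q True; then
              echo "TiDB at ${host}:${port} is bootstrapped"
              return 0
          fi
          sleep 1
      done
      echo "TiDB at ${host}:${port} did not come up in time" >&2
      return 1
  }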
TiFlash Release Version: v8.2.0-alpha-17-g8e50de84e Edition: Community Git Commit Hash: 8e50de84e6d6ecdcc108990217b70b6bb3f50271 Git Branch: HEAD UTC Build Time: 2024-05-06 04:04:42 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-05-06 04:09:34 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e50de84e6d6ecdcc108990217b70b6bb3f50271"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-17-g8e50de84e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } kill finished with exit code 0 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714977595.8094573,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0022da000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading 
github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 kill finished with exit code 0 Killing processes script returned exit code 143 {"level":"warn","ts":1714977597.2367022,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0020f2a80/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 1 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714977597.506342,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0020ab180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: read tcp 127.0.0.1:38042->127.0.0.1:2379: read: connection reset by peer"} script returned exit code 143 kill finished with exit code 0 script returned exit code 143 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // dir [Pipeline] // cache [Pipeline] // dir [Pipeline] // dir [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] // withCredentials [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // withCredentials [Pipeline] // dir [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // 
dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // timeout [Pipeline] // withCredentials [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // stage [Pipeline] // timeout [Pipeline] // stage [Pipeline] // stage [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // container [Pipeline] // stage [Pipeline] // container [Pipeline] // container [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] // container [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv 
[Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // node [Pipeline] // withEnv [Pipeline] // node [Pipeline] // node [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] // podTemplate [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] // withEnv [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G00' [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G03' [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G08' [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G09' [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G07' [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G02' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G04' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G05' [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G06' [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage 
[Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G01' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G10' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G11' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G12' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G13' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G14' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G15' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G16' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G17' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G18' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G19' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G20' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G21' [Pipeline] // parallel [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] End of Pipeline ERROR: script returned exit code 1 Finished: ABORTED
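The "*** etcd info ***" dumps earlier in this log list TiCDC's metadata keys (captures, owner election, changefeed info/status, task positions) under the /tidb/cdc prefix in the etcd embedded in PD. Assuming etcdctl is available and pointed at the PD endpoint used throughout this job, the same keys can be inspected directly; the changefeed key below is copied from the dump, everything else is an illustrative sketch rather than part of the test harness.

  export ETCDCTL_API=3
  # list every TiCDC metadata key without printing the (large) JSON values
  etcdctl --endpoints=http://127.0.0.1:2379 get /tidb/cdc/ --prefix --keys-only
  # fetch one changefeed's checkpoint/status document
  etcdctl --endpoints=http://127.0.0.1:2379 get /tidb/cdc/default/default/changefeed/status/e32e1a9f-58a1-4064-a96c-40dda6a2c285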
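The capture-count assertion "cdc cli capture list --server http://127.0.0.1:8302 |jq '.|length'|grep -E '^2$'" also appears above, followed by "run task successfully". Wrapped in a retry loop it looks roughly like the sketch below; the wrapper function and its retry budget are assumptions, while the cdc/jq pipeline is copied from the log.

  # expect_capture_count is an illustrative wrapper, not the harness's run-task helper.
  expect_capture_count() {
      local server=$1 expected=$2
      for i in $(seq 1 30); do
          # count the captures currently registered with this cdc server
          count=$(cdc cli capture list --server "$server" | jq '.|length')
          if [ "$count" = "$expected" ]; then
              echo "run task successfully"
              return 0
          fi
          sleep 2
      done
      echo "expected $expected captures on $server, got ${count:-0}" >&2
      return 1
  }

  expect_capture_count http://127.0.0.1:8302 2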
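The availability case earlier in this log (the "+ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info ..." trace) polls a restarted cdc server until its /debug/info endpoint serves the processor and etcd metadata. A condensed sketch of that loop is shown below: the curl command, grep patterns, 50-attempt budget and 3-second backoff are taken from the trace, and only the wrapper function is invented here.

  # wait_cdc_ready is an illustrative name; the real scripts inline this loop.
  wait_cdc_ready() {
      local addr=${1:-127.0.0.1:8301}
      for i in $(seq 1 50); do
          # /debug/info dumps processor and etcd metadata once the server is up
          res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>&1)
          if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
              echo "cdc server on ${addr} is ready"
              return 0
          fi
          sleep 3
      done
      echo "cdc server on ${addr} did not become ready" >&2
      return 1
  }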